Dataset schema, reconstructed from the flattened header row (column | dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
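A note on reading the `qsc_*` columns: the `*_quality_signal` variants carry the raw metric values, while the unsuffixed twins of the same names only ever take 0, 1, or null in the rows below, so they look like per-filter pass flags, with `effective` and `hits` summarizing the outcome per row. That reading is inferred from the data itself; the dump carries no column documentation.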
**Record 1 — tests/pygosolnp_input_validation_test.py (KristerSJakobsson/pygosolnp)**

| field | value |
|---|---|
| hexsha | ea6f78590bc071b958b247c093a75969d4e4c333 |
| size | 12,525 |
| ext / lang | py / Python |
| repo_path | tests/pygosolnp_input_validation_test.py |
| repo_name | KristerSJakobsson/pygosolnp |
| repo_head_hexsha | 5a890d67782ff04f521644daeaef2f7708959e79 |
| licenses | ["BSL-1.0"] |
| stars / issues / forks counts and event datetimes | null |

(The path, repo, head hexsha and licenses are identical across the max_stars, max_issues and max_forks variants in every record, so they are shown once per record.)

content:

```python
import itertools
import unittest
from pygosolnp.pygosolnp import solve
from pygosolnp.sampling import NormalDistribution
from .resources import alkyla_equality_function, parameter_lower_bounds, parameter_upper_bounds, \
alkyla_inequality_function, alkyla_objective_function, inequality_lower_bounds, inequality_upper_bounds, \
equality_values
class TestPygosolnpInputValidation(unittest.TestCase):
def test_bad_objective_function(self):
# Missing mandatory data
with self.assertRaises(ValueError):
solve(obj_func=None,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds)
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=None,
par_upper_limit=parameter_upper_bounds)
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=None)
# Non-callable objective function
with self.assertRaises(ValueError):
solve(obj_func={"hello": "world"},
par_lower_limit=[1, 2, 3],
par_upper_limit=parameter_upper_bounds)
# Lower bounds missing length attribute
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=2,
par_upper_limit=parameter_upper_bounds)
        # Upper bounds missing length attribute
        with self.assertRaises(ValueError):
            solve(obj_func=alkyla_objective_function,
                  par_lower_limit=parameter_lower_bounds,
                  par_upper_limit=1515)
# Different length of bounds
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=[1, 2, 3],
par_upper_limit=parameter_upper_bounds)
# Random number distribution not same length as parameters
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
start_guess_sampling=[NormalDistribution(1, 0.1)])
def test_bad_eq_constraints(self):
# Eq function without values
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
start_guess_sampling=[NormalDistribution(1, 0.1)],
eq_func=alkyla_equality_function)
# Eq values without function
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
start_guess_sampling=[NormalDistribution(1, 0.1)],
eq_values=equality_values)
        # Eq func not callable
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
start_guess_sampling=[NormalDistribution(1, 0.1)],
eq_func={"Not", "Callable"},
eq_values=equality_values)
# Eq values not of fixed length
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
start_guess_sampling=[NormalDistribution(1, 0.1)],
eq_func=alkyla_equality_function,
eq_values=2)
def test_bad_ineq_constraints(self):
# Ineq func without bounds
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function)
# Ineq lower bound without func or upper bound
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_lower_bounds=inequality_lower_bounds)
# Ineq func not callable
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=[1, 2, 3],
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds)
# Ineq lower bounds not of fixed size
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=itertools.cycle("abcd"),
ineq_upper_bounds=inequality_upper_bounds)
# Ineq upper bounds not of fixed size
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=itertools.cycle("efgh"))
# Ineq lower and upper bounds of different length
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=[1, 2, 3],
ineq_upper_bounds=inequality_upper_bounds)
def test_bad_pygosolnp_parameters(self):
# Needs strictly more than 0 simulations
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
number_of_simulations=0)
# Needs strictly more than 0 restarts
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
number_of_restarts=0)
# Needs strictly more than 0 processes
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
number_of_processes=0)
def test_bad_pysolnp_parameters(self):
# Various pysolnp parameters as wrong variable type
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_tolerance="a")
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_max_minor_iter="a")
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_max_major_iter="a")
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_rho="a")
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_delta="a")
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
debug="a")
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_max_minor_iter=0)
with self.assertRaises(ValueError):
solve(obj_func=alkyla_objective_function,
par_lower_limit=parameter_lower_bounds,
par_upper_limit=parameter_upper_bounds,
ineq_func=alkyla_inequality_function,
ineq_lower_bounds=inequality_lower_bounds,
ineq_upper_bounds=inequality_upper_bounds,
eq_func=alkyla_equality_function,
eq_values=equality_values,
pysolnp_max_major_iter=0)
```

Remaining cells of this row, in the schema order above (`avg_line_length` through `hits`):

45.711679 | 110 | 0.637764 | 1,281 | 12,525 | 5.771272 | 0.071819 | 0.092249 | 0.078453 | 0.117679 | 0.869742 | 0.859867 | 0.839849 | 0.823346 | 0.816583 | 0.816583 | 0 | 0.004755 | 0.311537 | 12,525 | 273 | 111 | 45.879121 | 0.852604 | 0.056766 | 0 | 0.818182 | 0 | 0 | 0.002968 | 0 | 0 | 0 | 0 | 0 | 0.131818 | 1 | 0.022727 | false | 0 | 0.022727 | 0 | 0.05 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
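Every call in the test file above is constructed to fail validation. For contrast, a minimal sketch of a call that should pass it — the keyword names and the "> 0" constraints are taken from the tests, but the objective function and bounds here are hypothetical, not from the pygosolnp test resources:

```python
from pygosolnp.pygosolnp import solve

# Hypothetical 2-parameter problem: a callable objective plus
# lower/upper bound lists of equal length, as the validation requires.
def objective(parameters):
    x, y = parameters[0], parameters[1]
    return (x - 1.0) ** 2 + (y + 2.0) ** 2

results = solve(obj_func=objective,
                par_lower_limit=[-5.0, -5.0],
                par_upper_limit=[5.0, 5.0],
                number_of_simulations=20,  # must be strictly > 0
                number_of_restarts=2)      # must be strictly > 0
```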
**Record 2 — src/python/tensorflow_cloud/core/tests/unit/validate_test.py (SinaChavoshi/cloud)**

| field | value |
|---|---|
| hexsha | ea8e64c9daccd475f43f2703de1fee4287f1e6f2 |
| size | 14,154 |
| ext / lang | py / Python |
| repo_path | src/python/tensorflow_cloud/core/tests/unit/validate_test.py |
| repo_name | SinaChavoshi/cloud |
| repo_head_hexsha | 5ecfadd7f962e5b864e140fdba1324cc4bcf207f |
| licenses | ["Apache-2.0"] |
| stars / issues / forks counts and event datetimes | null |

content:

```python
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the validation module."""
import mock
import os
import unittest
from tensorflow_cloud.core import machine_config
from tensorflow_cloud.core import validate
@mock.patch("os.path")
class TestValidate(unittest.TestCase):
def setup(self):
self.script_entry_point = "mnist_example_using_fit.py"
self.notebook_entry_point = "mnist_example_using_fit.ipynb"
self.requirements_file = "requirements.txt"
def test_valid_args(self, mock_os_path):
self.setup()
validate.validate(
entry_point=self.script_entry_point,
distribution_strategy="auto",
requirements_txt=self.requirements_file,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
validate.validate(
entry_point=self.script_entry_point,
distribution_strategy=None,
requirements_txt=self.requirements_file,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=None,
worker_count=0,
region="us-central1",
args=["1000"],
stream_logs=False,
docker_image_bucket_name=None,
called_from_notebook=False,
)
validate.validate(
entry_point=self.notebook_entry_point,
distribution_strategy=None,
requirements_txt=self.requirements_file,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=None,
worker_count=0,
region="us-central1",
args=["1000"],
stream_logs=False,
docker_image_bucket_name=None,
called_from_notebook=False,
)
validate.validate(
entry_point=None,
distribution_strategy=None,
requirements_txt=self.requirements_file,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=None,
worker_count=0,
region="us-central1",
args=["1000"],
stream_logs=False,
docker_image_bucket_name="abc",
called_from_notebook=True,
)
validate.validate(
entry_point=None,
distribution_strategy=None,
requirements_txt=self.requirements_file,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=None,
worker_count=0,
region="us-central1",
args=["1000"],
stream_logs=False,
docker_image_bucket_name="abc",
called_from_notebook=True,
job_labels={"a": "b"},
)
def test_invalid_entry_point(self, mock_os_path):
mock_os_path.isfile.return_value = False
with self.assertRaisesRegex(ValueError, r"Invalid `entry_point`"):
validate.validate(
entry_point="/mnist_example_using_fit.py",
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
with self.assertRaisesRegex(ValueError, r"Invalid `entry_point`"):
validate.validate(
entry_point="/mnist_example_using_fit.txt",
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_requirements_txt(self, mock_os_path):
mock_os_path.isfile.return_value = False
with self.assertRaisesRegex(ValueError, r"Invalid `requirements_txt`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt="temp.txt",
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_distribution_strategy(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `distribution_strategy`"):
validate.validate(
entry_point=None,
distribution_strategy="MirroredStrategy",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_chief_config(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `chief_config`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=None,
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_worker_config(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `worker_config`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=None,
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_worker_count(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `worker_count`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=-1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_region(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `region`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region=["us-region-a"],
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_args(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `entry_point_args`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args="1000",
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_stream_logs(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `stream_logs`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs="True",
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_cloud_bucket_name(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `docker_image_bucket_name`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=False,
docker_image_bucket_name=None,
called_from_notebook=True,
)
def test_invalid_tpu_chief_config(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `chief_config`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["TPU"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["K80_1X"],
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_tpu_worker_count(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid `worker_count`"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["CPU"],
worker_config=machine_config.COMMON_MACHINE_CONFIGS["TPU"],
worker_count=2,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
def test_invalid_tpu_accelerator_count(self, mock_os_path):
with self.assertRaisesRegex(ValueError, r"Invalid machine configuration"):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["CPU"],
worker_config=machine_config.MachineConfig(
accelerator_type=machine_config.AcceleratorType.TPU_V3
),
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
@mock.patch("tensorflow_cloud.core.validate.VERSION", "2.2.0")
def test_invalid_tpu_accelerator_tf_version(self, mock_os_path):
with self.assertRaisesRegex(
NotImplementedError, r"TPUs are only supported for TF version <= 2.1.0"
):
validate.validate(
entry_point=None,
distribution_strategy="auto",
requirements_txt=None,
chief_config=machine_config.COMMON_MACHINE_CONFIGS["CPU"],
worker_config=machine_config.MachineConfig(
accelerator_type=machine_config.AcceleratorType.TPU_V2,
accelerator_count=8,
),
worker_count=1,
region="us-central1",
args=None,
stream_logs=True,
docker_image_bucket_name=None,
called_from_notebook=False,
)
if __name__ == "__main__":
unittest.main()
```

Remaining cells of this row, in the schema order above (`avg_line_length` through `hits`):

39.758427 | 87 | 0.59114 | 1,430 | 14,154 | 5.493007 | 0.111189 | 0.061235 | 0.082241 | 0.101846 | 0.859962 | 0.844303 | 0.840484 | 0.824316 | 0.818969 | 0.818969 | 0 | 0.016539 | 0.329306 | 14,154 | 355 | 88 | 39.870423 | 0.810913 | 0.042603 | 0 | 0.752381 | 0 | 0 | 0.079805 | 0.014557 | 0 | 0 | 0 | 0 | 0.047619 | 1 | 0.050794 | false | 0 | 0.015873 | 0 | 0.069841 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
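One detail worth calling out in the file above: the class-level `@mock.patch("os.path")` hands a `MagicMock` to every test method as its last positional argument (`mock_os_path`), which is why tests like `test_invalid_entry_point` can force `isfile` to return False. A self-contained sketch of that pattern, independent of tensorflow_cloud:

```python
import os
import unittest
from unittest import mock  # stdlib equivalent of the standalone `mock` package


@mock.patch("os.path")  # applied to every test_* method in the class
class PatchDemo(unittest.TestCase):
    def test_isfile_is_mocked(self, mock_os_path):
        # os.path is a MagicMock for the duration of the test, so isfile
        # returns whatever we configure instead of touching the filesystem.
        mock_os_path.isfile.return_value = False
        self.assertFalse(os.path.isfile("/definitely/exists"))


if __name__ == "__main__":
    unittest.main()
```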
**Record 3 — calories/models.py (shawonAlam/Django-Calorie-tracker)**

| field | value |
|---|---|
| hexsha | 577776aa374fa6004c15a305e760ea24128e2f8b |
| size | 4,595 |
| ext / lang | py / Python |
| repo_path | calories/models.py |
| repo_name | shawonAlam/Django-Calorie-tracker |
| repo_head_hexsha | c6713a4108882ad0838932c78cff1a3f15753510 |
| licenses | ["MIT"] |
| stars / issues / forks counts and event datetimes | null |

content:

```python
"""from django.db import models
from django.contrib.auth.models import User
from datetime import date
class Food(models.Model):
name = models.CharField(max_length=200, null=False)
quantity = models.PositiveIntegerField(null=False, default=0)
calorie = models.FloatField(null=False, default=0)
person_of = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
def __str__(self):
return self.name
class Profile(models.Model):
person_of = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
calorie_count = models.FloatField(default=0, null=True, blank=True)
food_selected = models.ForeignKey(Food,
on_delete=models.CASCADE,
null=True,
blank=True)
quantity = models.FloatField(default=0)
total_calorie = models.FloatField(default=0, null=True)
date = models.DateField(auto_now_add=True)
calorie_goal = models.PositiveIntegerField(default=0)
all_food_selected_today = models.ManyToManyField(Food,
through='PostFood',
related_name='inventory')
def save(self, *args, **kwargs):
if self.food_selected != None:
self.amount = (self.food_selected.calorie/self.food_selected.quantity)
self.calorie_count = self.amount * self.quantity
self.total_calorie = self.calorie_count + self.total_calorie
calories = Profile.objects.filter(person_of=self.person_of).last()
PostFood.objects.create(profile=calories,
food=self.food_selected,
calorie_amount=self.calorie_count,
amount=self.quantity)
self.food_selected = None
super(Profile, self).save(*args, **kwargs)
else:
super(Profile, self).save(*args, **kwargs)
def __str__(self):
return str(self.person_of.username)
class PostFood(models.Model):
profile = models.ForeignKey(Profile, on_delete=models.CASCADE)
food = models.ForeignKey(Food, on_delete=models.CASCADE)
calorie_amount = models.FloatField(default=0, null=True, blank=True)
amount = models.FloatField(default=0)
"""
from django.db import models
from django.contrib.auth.models import User
from datetime import date
# Create your models here.
class Food(models.Model):
name = models.CharField(max_length=200, null=False)
quantity = models.PositiveIntegerField(null=False, default=0)
calorie = models.FloatField(null=False, default=0)
person_of = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
def __str__(self):
return self.name
class Profile(models.Model):
person_of = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
calorie_count = models.FloatField(default=0, null=True, blank=True)
food_selected = models.ForeignKey(Food, on_delete=models.CASCADE, null=True, blank=True)
quantity = models.FloatField(default=0)
total_calorie = models.FloatField(default=0, null=True)
date = models.DateField(auto_now_add=True)
calorie_goal = models.PositiveIntegerField(default=0, null=True)
all_food_selected_today = models.ManyToManyField(Food, through='PostFood', related_name='inventory')
def save(self, *args, **kwargs): # new
if self.food_selected != None:
self.amount = (self.food_selected.calorie / self.food_selected.quantity)
self.calorie_count = self.amount * self.quantity
self.total_calorie = self.calorie_count + self.total_calorie
calories = Profile.objects.filter(person_of=self.person_of).last()
PostFood.objects.create(profile=calories, food=self.food_selected, calorie_amount=self.calorie_count,
amount=self.quantity)
self.food_selected = None
super(Profile, self).save(*args, **kwargs)
else:
super(Profile, self).save(*args, **kwargs)
def __str__(self):
return str(self.person_of.username)
class PostFood(models.Model):
profile = models.ForeignKey(Profile, on_delete=models.CASCADE)
food = models.ForeignKey(Food, on_delete=models.CASCADE)
calorie_amount = models.FloatField(default=0, null=True, blank=True)
amount = models.FloatField(default=0)
```

Remaining cells of this row, in the schema order above (`avg_line_length` through `hits`):

41.772727 | 114 | 0.642655 | 525 | 4,595 | 5.468571 | 0.135238 | 0.044584 | 0.048764 | 0.073145 | 0.989202 | 0.989202 | 0.989202 | 0.989202 | 0.989202 | 0.989202 | 0 | 0.00644 | 0.256583 | 4,595 | 110 | 115 | 41.772727 | 0.834016 | 0.520348 | 0 | 0.157895 | 0 | 0 | 0.008142 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078947 | false | 0 | 0.078947 | 0.052632 | 0.710526 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7
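The interesting part of the model above is the overridden `save`: when `food_selected` is set it derives the per-unit calorie amount, accumulates `total_calorie`, logs a `PostFood` row through the many-to-many `through` table, then clears the selection. A hypothetical Django-shell walkthrough of that flow (assumes migrations are applied; the user and food values are made up):

```python
from django.contrib.auth.models import User
from calories.models import Food, Profile, PostFood

user = User.objects.create_user("demo")
banana = Food.objects.create(name="Banana", quantity=100, calorie=89, person_of=user)

profile = Profile.objects.create(person_of=user, calorie_goal=2000)
profile.food_selected = banana
profile.quantity = 150        # units eaten, same scale as Food.quantity
profile.save()                # 89 / 100 * 150 = 133.5 added to total_calorie

profile.refresh_from_db()
print(profile.total_calorie)                             # ~133.5
print(PostFood.objects.filter(profile=profile).count())  # 1
```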
**Record 4 — project_checker/checker/buildservice/__init__.py (zuzannnaobajtek/github-cmake-project-checker)**

| field | value |
|---|---|
| hexsha | 57867de14c1d5eb2d435f2c6eeb097f22ad38e71 |
| size | 143 |
| ext / lang | py / Python |
| repo_path | project_checker/checker/buildservice/__init__.py |
| repo_name | zuzannnaobajtek/github-cmake-project-checker |
| repo_head_hexsha | 1406c2247bbbecb490bc5000c7fa521b9bf96ec0 |
| licenses | ["MIT"] |
| max_stars_count | 1 (events 2017-05-17T21:21:54.000Z → 2017-05-17T21:21:54.000Z) |
| max_issues_count | 13 (events 2018-03-28T15:36:17.000Z → 2018-04-25T16:44:00.000Z) |
| max_forks_count | 15 (events 2017-05-31T11:44:20.000Z → 2018-04-19T15:03:35.000Z) |

content:

```python
from project_checker.checker.buildservice.cmakeservice import Target
from project_checker.checker.buildservice.cmakeservice import CMakeService
```

Remaining cells of this row, in the schema order above (`avg_line_length` through `hits`):

71.5 | 74 | 0.909091 | 16 | 143 | 8 | 0.4375 | 0.171875 | 0.28125 | 0.390625 | 0.859375 | 0.859375 | 0.859375 | 0 | 0 | 0 | 0 | 0 | 0.048951 | 143 | 2 | 74 | 71.5 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10
**Record 5 — migrations/versions/77442fc8e343_managedresults_uses_text_fields.py (louking/rrwebapp)**

| field | value |
|---|---|
| hexsha | 578902c5918a9aa68db770000968a3307cdeafb2 |
| size | 2,996 |
| ext / lang | py / Python |
| repo_path | migrations/versions/77442fc8e343_managedresults_uses_text_fields.py |
| repo_name | louking/rrwebapp |
| repo_head_hexsha | 5c73f84e1a21bc3b5fa51d83ba576c3152e6cf27 |
| licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_issues_count | 417 (events 2015-05-07T16:50:22.000Z → 2022-03-14T16:16:13.000Z) |
| max_forks_count | null |

content:

```python
"""ManagedResults uses Text fields
Revision ID: 77442fc8e343
Revises: bd228893783a
Create Date: 2021-07-14 06:41:04.343278
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '77442fc8e343'
down_revision = 'bd228893783a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('managedresult', 'city',
existing_type=mysql.VARCHAR(length=50),
type_=sa.Text(),
existing_nullable=True)
op.alter_column('managedresult', 'club',
existing_type=mysql.VARCHAR(length=20),
type_=sa.Text(),
existing_nullable=True)
op.alter_column('managedresult', 'fname',
existing_type=mysql.VARCHAR(length=50),
type_=sa.Text(),
existing_nullable=True)
op.alter_column('managedresult', 'hometown',
existing_type=mysql.VARCHAR(length=50),
type_=sa.Text(),
existing_nullable=True)
op.alter_column('managedresult', 'lname',
existing_type=mysql.VARCHAR(length=50),
type_=sa.Text(),
existing_nullable=True)
op.alter_column('managedresult', 'name',
existing_type=mysql.VARCHAR(length=50),
type_=sa.Text(),
existing_nullable=True)
op.alter_column('managedresult', 'state',
existing_type=mysql.VARCHAR(length=2),
type_=sa.Text(),
existing_nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('managedresult', 'state',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=2),
existing_nullable=True)
op.alter_column('managedresult', 'name',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=50),
existing_nullable=True)
op.alter_column('managedresult', 'lname',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=50),
existing_nullable=True)
op.alter_column('managedresult', 'hometown',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=50),
existing_nullable=True)
op.alter_column('managedresult', 'fname',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=50),
existing_nullable=True)
op.alter_column('managedresult', 'club',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=20),
existing_nullable=True)
op.alter_column('managedresult', 'city',
existing_type=sa.Text(),
type_=mysql.VARCHAR(length=50),
existing_nullable=True)
# ### end Alembic commands ###
```

Remaining cells of this row, in the schema order above (`avg_line_length` through `hits`):

36.096386 | 65 | 0.593124 | 309 | 2,996 | 5.559871 | 0.20712 | 0.057043 | 0.105937 | 0.211874 | 0.826542 | 0.802095 | 0.763679 | 0.719441 | 0.67986 | 0.67986 | 0 | 0.038407 | 0.287383 | 2,996 | 82 | 66 | 36.536585 | 0.766276 | 0.104473 | 0 | 0.8 | 0 | 0 | 0.104308 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030769 | false | 0 | 0.046154 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
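For reference, the revision pair declared in the migration above (`77442fc8e343` on top of `bd228893783a`) can be exercised without the CLI through Alembic's command API. A sketch, assuming the project's `alembic.ini` is on hand:

```python
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")             # assumes the repo's Alembic config file
command.upgrade(cfg, "77442fc8e343")    # widen the VARCHAR columns to sa.Text()
command.downgrade(cfg, "bd228893783a")  # restore the original VARCHAR sizes
```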
**Record 6 — app/models.py (CirculusVCFB/example-fastapi)**

| field | value |
|---|---|
| hexsha | 57ba282a6429f885026672fef2c4876939506e59 |
| size | 6,991 |
| ext / lang | py / Python |
| repo_path | app/models.py |
| repo_name | CirculusVCFB/example-fastapi |
| repo_head_hexsha | 87a9ba0c7db245326e3aa9b952585a440ba2327c |
| licenses | ["bzip2-1.0.6"] |
| max_stars_count | null |
| max_issues_count | 1 (events 2022-03-01T07:02:47.000Z → 2022-03-01T07:02:47.000Z) |
| max_forks_count | null |

content:

```python
from .database import Base
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from .database import Base
from sqlalchemy.sql.sqltypes import TIMESTAMP
from sqlalchemy.sql.expression import text
class Post(Base):
__tablename__ = "posts"
id = Column(Integer, primary_key=True, nullable=False, index=True)
title = Column(String(1000), nullable=False,)
content = Column(String(1000), nullable=False)
published = Column(Boolean, default = True)
created_at = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
user_id = Column(Integer, ForeignKey("users.id", ondelete = "CASCADE"), nullable = False)
owner = relationship("User")
class User(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True, index=True, nullable=False)
email = Column(String(100), unique=True, nullable=False)
password = Column(String(100))
is_active = Column(Boolean, default=True)
created_at = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
class Vote(Base):
__tablename__ = "votes"
user_id = Column(Integer, ForeignKey("users.id", ondelete = "CASCADE"), primary_key = True)
post_id = Column(Integer, ForeignKey("posts.id", ondelete = "CASCADE"), primary_key = True)
class Alembic(Base):
__tablename__ = "alembic"
id = Column(Integer, primary_key=True, nullable=False, index=True)
title = Column(String(1000), nullable=False,)
content = Column(String(1000), nullable=False)
published = Column(Boolean, default = True)
created_at = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
user_id = Column(Integer, ForeignKey("users.id", ondelete = "CASCADE"), nullable = False)
owner = relationship("User")
class Prizetablek10(Base):
__tablename__ = "k10"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
started = Column(Boolean)
completed = Column(Boolean)
withdrawn = Column(Boolean)
class Prizetablek15(Base):
__tablename__ = "k15"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek20(Base):
__tablename__ = "k20"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek25(Base):
__tablename__ = "k25"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek50(Base):
__tablename__ = "k50"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek60(Base):
__tablename__ = "k60"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek70(Base):
__tablename__ = "k70"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek80(Base):
__tablename__ = "k80"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek100(Base):
__tablename__ = "k100"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek150(Base):
__tablename__ = "k150"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek200(Base):
__tablename__ = "k200"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek250(Base):
__tablename__ = "k250"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek300(Base):
__tablename__ = "k300"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek400(Base):
__tablename__ = "k400"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek500(Base):
__tablename__ = "k500"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
class Prizetablek600(Base):
__tablename__ = "k600"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Integer)
lastupdated = Column(TIMESTAMP(timezone = True ), nullable = False, server_default = text('now()'))
username = Column(String(1000))
status = Column(Boolean)
activated = Column(Boolean)
```

Remaining cells of this row, in the schema order above (`avg_line_length` through `hits`):

36.222798 | 102 | 0.733658 | 824 | 6,991 | 6.069175 | 0.11165 | 0.10138 | 0.068986 | 0.083583 | 0.816437 | 0.816437 | 0.793041 | 0.793041 | 0.785443 | 0.775245 | 0 | 0.02737 | 0.132456 | 6,991 | 193 | 103 | 36.222798 | 0.797197 | 0 | 0 | 0.680723 | 0 | 0 | 0.034468 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.006024 | 0.036145 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7
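The sixteen `Prizetablek*` classes in the file above differ only in `__tablename__` (plus three extra flags on `k10`). If that duplication ever needs taming, SQLAlchemy copies `Column` objects declared on a plain mixin into each mapped subclass; a sketch of that refactoring, not part of the original code:

```python
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.sql.expression import text
from sqlalchemy.sql.sqltypes import TIMESTAMP

from .database import Base  # same declarative Base as the original module


class PrizeTableMixin:
    """Shared columns; SQLAlchemy clones these into every subclass."""
    id = Column(Integer, primary_key=True, index=True)
    amount = Column(Integer)
    lastupdated = Column(TIMESTAMP(timezone=True), nullable=False,
                         server_default=text('now()'))
    username = Column(String(1000))
    status = Column(Boolean)
    activated = Column(Boolean)


class Prizetablek15(PrizeTableMixin, Base):
    __tablename__ = "k15"


class Prizetablek20(PrizeTableMixin, Base):
    __tablename__ = "k20"

# ...and so on for the remaining tables; k10 would additionally declare
# its started/completed/withdrawn Boolean columns on the subclass.
```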
**Record 7 — devilry/devilry_student/tests/test_group/test_projectgroupapp.py (devilry/devilry-django)**

| field | value |
|---|---|
| hexsha | 57dafc0acc8cdceea9492a1a964f9b693a0ecae1 |
| size | 88,623 |
| ext / lang | py / Python |
| repo_path | devilry/devilry_student/tests/test_group/test_projectgroupapp.py |
| repo_name | devilry/devilry-django |
| repo_head_hexsha | 9ae28e462dfa4cfee966ebacbca04ade9627e715 |
| licenses | ["BSD-3-Clause"] |
| max_stars_count | 29 (events 2015-01-18T22:56:23.000Z → 2020-11-10T21:28:27.000Z) |
| max_issues_count | 786 (events 2015-01-06T16:10:18.000Z → 2022-03-16T11:10:50.000Z) |
| max_forks_count | 15 (events 2015-04-06T06:18:43.000Z → 2021-02-24T12:28:30.000Z) |

content:

```python
import mock
from django.contrib import messages
from django.http import Http404
from django.test import TestCase
from django.utils import timezone
from django.utils.timezone import datetime, timedelta
from cradmin_legacy import cradmin_testhelpers
from model_bakery import baker
from devilry.apps.core import devilry_core_baker_factories as core_baker
from devilry.apps.core.models import AssignmentGroup
from devilry.apps.core.models import GroupInvite
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
from devilry.devilry_student.views.group import projectgroupapp
class TestProjectGroupOverviewView(TestCase, cradmin_testhelpers.TestCaseMixin):
viewclass = projectgroupapp.ProjectGroupOverviewView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def __mockinstance_with_devilryrole(self, devilryrole):
mockinstance = mock.MagicMock()
mockinstance.get_devilryrole_for_requestuser.return_value = devilryrole
return mockinstance
def test_title(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertIn(
'Project group',
mockresponse.selector.one('title').alltext_normalized)
def test_h1(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertIn(
'Project group',
mockresponse.selector.one('h1').alltext_normalized)
def test_inner_header_p(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertIn(
'{} - {} - {}'.format(testassignment.long_name,
testassignment.parentnode.parentnode.long_name,
testassignment.parentnode.long_name),
mockresponse.selector.one('.cradmin-legacy-page-header-inner > p').alltext_normalized
)
def test_group_members_ul_exists(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
core_baker.candidate(group=group, fullname="Dewey Duck", shortname="dewey@example.com")
core_baker.candidate(group=group, fullname="Huey Duck", shortname="huey@example.com")
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertTrue(mockresponse.selector.exists('#devilry_student_projectgroup_overview_already_in_group > ul'))
def test_group_project_members_displayname(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group, fullname="Dewey Duck", shortname="dewey@example.com")
candidate2 = core_baker.candidate(group=group, fullname="Huey Duck", shortname="huey@example.com")
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
candidate_list = [
cand.alltext_normalized
for cand in mockresponse.selector.list('#devilry_student_projectgroup_overview_already_in_group > ul > li')]
self.assertEqual(3, len(candidate_list))
self.assertIn(candidate.relatedstudent.user.get_displayname(), candidate_list)
self.assertIn(candidate1.relatedstudent.user.get_displayname(), candidate_list)
self.assertIn(candidate2.relatedstudent.user.get_displayname(), candidate_list)
def test_links(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertEqual(1, len(mockresponse.request.cradmin_instance.reverse_url.call_args_list))
self.assertEqual(
mock.call(appname='feedbackfeed', args=(), kwargs={}, viewname='INDEX'),
mockresponse.request.cradmin_instance.reverse_url.call_args_list[0]
)
class TestProjectGroupOverviewViewStudentsCannotCreateGroups(TestCase, cradmin_testhelpers.TestCaseMixin):
viewclass = projectgroupapp.ProjectGroupOverviewView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def __mockinstance_with_devilryrole(self, devilryrole):
mockinstance = mock.MagicMock()
mockinstance.get_devilryrole_for_requestuser.return_value = devilryrole
return mockinstance
def test_submit_button_sutdents_cannot_create_groups(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertFalse(mockresponse.selector.exists('#submit-id-submit'))
def test_submit_button_students_cannot_create_groups_expired(self):
group = baker.make('core.AssignmentGroup',
parentnode__students_can_create_groups=True,
parentnode__students_can_not_create_groups_after=timezone.now() - timedelta(days=10))
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertFalse(mockresponse.selector.exists('#submit-id-submit'))
def test_invite_box_does_not_exists(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertFalse(mockresponse.selector.exists('#devilry_student_projectgroupoverview_invitebox'))
def test_waiting_for_response_form_does_not_exists(self):
group = baker.make('core.AssignmentGroup')
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertFalse(mockresponse.selector.exists('#devilry_student_projectgroup_overview_waiting_for_response_from'))
def test_cannot_invite_student_to_group(self):
test_assignment = baker.make('core.Assignment')
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
self.mock_http200_postrequest_htmls(
requestuser=candidate.relatedstudent.user,
cradmin_role=group,
requestkwargs={
'data': {'sent_to': candidate1.id}
}
)
self.assertFalse(GroupInvite.objects.filter(group=group, sent_to=candidate1.relatedstudent.user).exists())
def test_received_invite_cannot_create_group(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=False)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user)
self.assertFalse(mockresponse.selector.exists('.alert.alert-success'))
self.assertFalse(mockresponse.selector.exists('.btn.btn-default'))
def test_received_invite_cannot_create_group_expired(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
students_can_not_create_groups_after=timezone.now() - timedelta(days=1)
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user)
self.assertFalse(mockresponse.selector.exists('.alert.alert-success'))
self.assertFalse(mockresponse.selector.exists('.btn.btn-default'))
class TestProjectGroupOverviewViewStudentsCanCreateGroups(TestCase, cradmin_testhelpers.TestCaseMixin):
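    """
    Tests for ProjectGroupOverviewView when the assignment allows students
    to create project groups.
    """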
viewclass = projectgroupapp.ProjectGroupOverviewView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def __mockinstance_with_devilryrole(self, devilryrole):
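        # Helper for building a mocked cradmin instance that reports the
        # given devilryrole for the request user.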
mockinstance = mock.MagicMock()
mockinstance.get_devilryrole_for_requestuser.return_value = devilryrole
return mockinstance
def test_submit_button_visible_when_students_can_create(self):
group = baker.make('core.AssignmentGroup',
parentnode__students_can_create_groups=True)
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertTrue(mockresponse.selector.exists('#submit-id-submit'))
def test_invite_box_exists(self):
group = baker.make('core.AssignmentGroup',
parentnode__students_can_create_groups=True)
candidate = core_baker.candidate(group=group)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
self.assertTrue(mockresponse.selector.exists('#devilry_student_projectgroupoverview_invitebox'))
    def test_invite_box_shows_correct_students(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group2 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
candidate2 = core_baker.candidate(group=group2, fullname="Huey Duck", shortname="huey@example.com")
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
selectlist = [elem.alltext_normalized for elem in mockresponse.selector.list('#id_sent_to > option')]
self.assertNotIn(candidate.relatedstudent.user.get_displayname(), selectlist)
self.assertIn(candidate1.relatedstudent.user.get_displayname(), selectlist)
self.assertIn(candidate2.relatedstudent.user.get_displayname(), selectlist)
def test_invite_student_to_group(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
requestuser=candidate.relatedstudent.user,
cradmin_role=group,
messagesmock=messagesmock,
requestkwargs={
'data': {'sent_to': candidate1.id}
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Invite sent to {}.'.format(candidate1.relatedstudent.user.get_displayname()),
''
)
def test_invite_student_to_group_db(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
requestuser=candidate.relatedstudent.user,
cradmin_role=group,
messagesmock=messagesmock,
requestkwargs={
'data': {'sent_to': candidate1.id}
}
)
self.assertTrue(GroupInvite.objects.filter(group=group, sent_to=candidate1.relatedstudent.user).exists())
def test_selected_choice_is_not_valid(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group, fullname="Dewey Duck", shortname="dewey@example.com")
messagesmock = mock.MagicMock()
self.mock_http200_postrequest_htmls(
requestuser=candidate.relatedstudent.user,
cradmin_role=group,
messagesmock=messagesmock,
requestkwargs={
'data': {'sent_to': candidate1.id}
}
)
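        # candidate1 is in the same group as the requesting student, so the
        # selected choice is invalid and no invite should be created.
        self.assertFalse(GroupInvite.objects.filter(group=group, sent_to=candidate1.relatedstudent.user).exists())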
def test_waiting_for_response_from_names(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group2 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
candidate2 = core_baker.candidate(group=group2, fullname="Huey Duck", shortname="huey@example.com")
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate2.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
selectlist = [elem.alltext_normalized for elem in mockresponse.selector.list('.invite_sent_to_displayname')]
self.assertNotIn(candidate.relatedstudent.user.get_displayname(), selectlist)
self.assertIn(candidate1.relatedstudent.user.get_displayname(), selectlist)
self.assertIn(candidate2.relatedstudent.user.get_displayname(), selectlist)
def test_waiting_for_response_from_invited_by(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group2 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate3 = core_baker.candidate(group=group, fullname="Louie Duck", shortname="louie@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
candidate2 = core_baker.candidate(group=group2, fullname="Huey Duck", shortname="huey@example.com")
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
baker.make('core.GroupInvite', group=group,
sent_by=candidate3.relatedstudent.user, sent_to=candidate2.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
selectlist = [elem.alltext_normalized for elem in mockresponse.selector.list('.invited_sent_by_displayname')]
self.assertIn(candidate.relatedstudent.user.get_displayname(), selectlist)
self.assertIn(candidate3.relatedstudent.user.get_displayname(), selectlist)
self.assertNotIn(candidate1.relatedstudent.user.get_displayname(), selectlist)
self.assertNotIn(candidate2.relatedstudent.user.get_displayname(), selectlist)
def test_waiting_for_response_delete_button(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group2 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
candidate2 = core_baker.candidate(group=group2, fullname="Huey Duck", shortname="huey@example.com")
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate2.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
        self.assertEqual(2, len(mockresponse.selector.list('.btn.btn-danger.btn-xs')))
def test_received_invite(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(cradmin_role=group1,
requestuser=candidate1.relatedstudent.user)
self.assertIn(
'You have been invited to join a group! {} invited you to join their group.'.format(
candidate.relatedstudent.user.get_displayname()),
mockresponse.selector.one('.alert.alert-success').alltext_normalized)
self.assertIn(
'More info',
mockresponse.selector.one('.btn.btn-default').alltext_normalized)
def test_get_num_queries(self):
group = baker.make('core.AssignmentGroup',
parentnode__students_can_create_groups=True)
candidate = core_baker.candidate(group=group)
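        # Query-count regression guard: the expected count depends on the
        # cached_data setup performed in setUp(), so update it if the view's
        # querysets change.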
with self.assertNumQueries(4):
self.mock_http200_getrequest_htmls(cradmin_role=group, requestuser=candidate.relatedstudent.user)
class TestGroupInviteRespondView(TestCase, cradmin_testhelpers.TestCaseMixin):
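    """
    Tests for GroupInviteRespondView, the respond view rendered within the
    student's cradmin role for the group.
    """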
viewclass = projectgroupapp.GroupInviteRespondView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_title(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Respond to group invite',
mockresponse.selector.one('title').alltext_normalized)
def test_h1(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Respond to group invite',
mockresponse.selector.one('h1').alltext_normalized)
def test_inner_header_p(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'{} - {} - {}'.format(testassignment.long_name,
testassignment.parentnode.parentnode.long_name,
testassignment.parentnode.long_name),
mockresponse.selector.one('.cradmin-legacy-page-header-inner > .container > p').alltext_normalized
)
def test_form_text(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010',
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'You have been invited by {} to join their project group for {} {}.'.format(
candidate.relatedstudent.user.get_full_name(),
testassignment.subject.long_name,
testassignment.long_name
),
            mockresponse.selector.one('form > p').alltext_normalized
)
def test_decline_button(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010',
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Decline invitation',
mockresponse.selector.one('.btn.btn-danger').alltext_normalized
)
def test_accept_button(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010',
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Accept invitation',
mockresponse.selector.one('.btn.btn-success').alltext_normalized
)
def test_links(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertEqual(1, len(mockresponse.request.cradmin_instance.reverse_url.call_args_list))
self.assertEqual(
mock.call(appname='projectgroup', args=(), kwargs={}, viewname='INDEX'),
mockresponse.request.cradmin_instance.reverse_url.call_args_list[0]
)
def test_already_part_of_group_with_more_than_one_student(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user)
core_baker.candidate(group=group1)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'You are already part of a group with more than one student!',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
def test_decline_invitation_message(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
messagesmock=messagesmock,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'decline_invite': ''
}
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Declined group invitation from {}.'.format(candidate.relatedstudent.user.get_displayname()),
''
)
def test_decline_invitation_db(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
self.mock_http302_postrequest(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'decline_invite': ''
}
}
)
self.assertFalse(GroupInvite.objects.get(id=invite.id).accepted)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertEqual(AssignmentGroup.objects.get(id=group.id).cached_data.candidate_count, 1)
self.assertEqual(AssignmentGroup.objects.get(id=group1.id).cached_data.candidate_count, 1)
    def test_accept_invitation_message(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
messagesmock=messagesmock,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Joined the group by invitation from {}.'.format(candidate.relatedstudent.user.get_displayname()),
''
)
def test_accept_invitation_db(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
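        # Accepting the invite should move candidate1 into ``group`` and
        # delete the now-empty ``group1``.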
self.mock_http302_postrequest(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertTrue(GroupInvite.objects.get(id=invite.id).accepted)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertFalse(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertEqual(AssignmentGroup.objects.get(id=group.id).cached_data.candidate_count, 2)
    def test_accept_already_part_of_a_group_with_more_than_one_student(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
core_baker.candidate(group=group1)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
messagesmock=messagesmock,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
messagesmock.add.assert_called_once_with(
messages.WARNING,
'The invited student is already in a project group with more than 1 students.',
''
)
def test_get_404_invite_has_already_been_accepted(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user, accepted=True)
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_get_404_invite_has_already_been_declined(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user, accepted=False)
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_404_student_can_no_longer_invite(self):
testassignment = baker.make('core.Assignment',
students_can_create_groups=True,
students_can_not_create_groups_after=timezone.now() - timedelta(days=1))
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user, accepted=False)
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_404_students_cannot_create_groups(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user, accepted=False)
testassignment.students_can_create_groups = False
testassignment.save()
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
cradmin_role=group1,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_get_num_queries(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
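        # Query-count regression guard for rendering the respond view.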
with self.assertNumQueries(3):
self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
class TestGroupInviteDeleteView(TestCase, cradmin_testhelpers.TestCaseMixin):
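    """
    Tests for GroupInviteDeleteView, where the sender (or another student in
    the sender's group) can delete a pending invite.
    """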
viewclass = projectgroupapp.GroupInviteDeleteView
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_title(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Delete group invite',
mockresponse.selector.one('title').alltext_normalized)
def test_h1(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Delete group invite',
mockresponse.selector.one('h1').alltext_normalized)
def test_form_text(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Are you sure you want to delete the invite for {}?'.format(invite.sent_to.get_full_name()),
mockresponse.selector.one('form > p').alltext_normalized
)
def test_header_inner_p(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'{} - {} - {}'.format(
testassignment.long_name,
testassignment.subject.long_name,
testassignment.period.long_name
),
            mockresponse.selector.one('.cradmin-legacy-page-header-inner > p').alltext_normalized
)
def test_delete_button(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Delete invite',
mockresponse.selector.one('.btn.btn-danger').alltext_normalized
)
def test_links(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertEqual(2, len(mockresponse.request.cradmin_instance.reverse_url.call_args_list))
self.assertEqual(
mock.call(appname='projectgroup', args=(), kwargs={}, viewname='INDEX'),
mockresponse.request.cradmin_instance.reverse_url.call_args_list[0]
)
self.assertEqual(
mock.call(appname='projectgroup', args=(), kwargs={}, viewname='INDEX'),
mockresponse.request.cradmin_instance.reverse_url.call_args_list[1]
)
def test_delete_invitation_message(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group,
messagesmock=messagesmock,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Removed project group invitation {}.'.format(candidate1.relatedstudent.user.get_displayname()),
''
)
def test_delete_invitation_db(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group,
messagesmock=messagesmock,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertFalse(GroupInvite.objects.filter(id=invite.id).exists())
def test_delete_invitation_by_another_user_in_group_message(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate2 = core_baker.candidate(group=group, fullname="Donald Duck", shortname="donald@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group,
messagesmock=messagesmock,
requestuser=candidate2.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Removed project group invitation {}.'.format(candidate1.relatedstudent.user.get_displayname()),
''
)
def test_delete_invitation_by_another_user_in_group_db(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate2 = core_baker.candidate(group=group, fullname="Donald Duck", shortname="donald@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
cradmin_role=group,
messagesmock=messagesmock,
requestuser=candidate2.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertFalse(GroupInvite.objects.filter(id=invite.id).exists())
def test_delete_invitation_by_a_user_not_in_group_404(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
group2 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate2 = core_baker.candidate(group=group2, fullname="Donald Duck", shortname="donald@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
with self.assertRaises(Http404):
self.mock_http302_postrequest(
cradmin_role=group,
messagesmock=messagesmock,
requestuser=candidate2.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_get_invitation_by_a_user_not_in_group_404(self):
testassignment = baker.make(
'core.Assignment',
long_name='Assignment 1',
parentnode__long_name='Spring 2017',
parentnode__parentnode__long_name='Duck1010'
)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
group2 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate2 = core_baker.candidate(group=group2, fullname="Donald Duck", shortname="donald@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
cradmin_role=group,
messagesmock=messagesmock,
requestuser=candidate2.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_get_num_queries(self):
testassignment = baker.make('core.Assignment')
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
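        # Query-count regression guard for rendering the delete view.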
with self.assertNumQueries(2):
self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
class TestGroupInviteRespondViewStandalone(TestCase, cradmin_testhelpers.TestCaseMixin):
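    """
    Tests for GroupInviteRespondViewStandalone, the variant of the respond
    view that is requested without a cradmin role.
    """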
viewclass = projectgroupapp.GroupInviteRespondViewStandalone
def setUp(self):
AssignmentGroupDbCacheCustomSql().initialize()
def test_title(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Respond to group invite',
mockresponse.selector.one('title').alltext_normalized)
def test_page_header_h1(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Respond to group invite',
mockresponse.selector.one('.page-header > .container > h1').alltext_normalized)
def test_page_header_p(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'{} - {} - {}'.format(
test_assignment.long_name,
test_assignment.subject.long_name,
test_assignment.period.long_name
),
mockresponse.selector.one('.page-header > .container > p').alltext_normalized)
def test_form_text(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'You have been invited by {} to join their project group for {} {}.'.format(
candidate.relatedstudent.user.get_displayname(),
test_assignment.subject.long_name,
test_assignment.long_name),
mockresponse.selector.one('form').alltext_normalized)
def test_decline_button(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Decline invitation',
mockresponse.selector.one('.btn.btn-danger').alltext_normalized
)
def test_accept_button(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'Accept invitation',
mockresponse.selector.one('.btn.btn-success').alltext_normalized
)
def test_user_is_not_logged_in(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
viewkwargs={
'invite_id': invite.id
}
)
    def test_404_cannot_view_another_users_invite(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
with self.assertRaises(Http404):
self.mock_http200_getrequest_htmls(
requestuser=candidate.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_already_in_group_with_more_than_one_student(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
core_baker.candidate(group=group1, fullname="Donald Duck", shortname="donald@example.com")
mockresponse = self.mock_http200_getrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
self.assertIn(
'You are already part of a group with more than one student!',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
def test_post_already_in_group_with_more_than_one_student(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
core_baker.candidate(group=group1, fullname="Donald Duck", shortname="donald@example.com")
mockresponse = self.mock_http200_postrequest_htmls(
cradmin_role=group,
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertIn(
'You are already part of a group with more than one student!',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertIsNone(GroupInvite.objects.get(id=invite.id).accepted)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertEqual(AssignmentGroup.objects.get(id=group1.id).cached_data.candidate_count, 2)
self.assertEqual(AssignmentGroup.objects.get(id=group.id).cached_data.candidate_count, 1)
def test_accept_invitation_message(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
requestuser=candidate1.relatedstudent.user,
messagesmock=messagesmock,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Joined the group by invitation from {}.'.format(candidate.relatedstudent.user.get_displayname()),
''
)
def test_accept_invitation_db(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
self.mock_http302_postrequest(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertTrue(GroupInvite.objects.get(id=invite.id).accepted)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertFalse(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertEqual(AssignmentGroup.objects.get(id=group.id).cached_data.candidate_count, 2)
def test_decline_invitation_message(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
messagesmock = mock.MagicMock()
self.mock_http302_postrequest(
requestuser=candidate1.relatedstudent.user,
messagesmock=messagesmock,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'decline_invite': ''
}
}
)
messagesmock.add.assert_called_once_with(
messages.SUCCESS,
'Declined group invitation from {}.'.format(candidate.relatedstudent.user.get_displayname()),
''
)
def test_decline_invitation_db(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
self.mock_http302_postrequest(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'decline_invite': ''
}
}
)
self.assertFalse(GroupInvite.objects.get(id=invite.id).accepted)
self.assertTrue(AssignmentGroup.objects.filter(id=group.id).exists())
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertEqual(AssignmentGroup.objects.get(id=group.id).cached_data.candidate_count, 1)
self.assertEqual(AssignmentGroup.objects.get(id=group1.id).cached_data.candidate_count, 1)
def test_invite_already_accepted_this_invite(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
parentnode__parentnode__long_name='Duck1010',
parentnode__long_name='Spring 2017',
long_name='Assignment 1'
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user, accepted=True)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
)
self.assertIn(
'You have already accepted this invite',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertFalse(mockresponse.selector.exists('form'))
def test_invite_already_declined_this_invite(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user,
sent_to=candidate1.relatedstudent.user, accepted=False)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
)
self.assertIn(
'You have already declined this invite',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertFalse(mockresponse.selector.exists('form'))
def test_accept_student_can_no_longer_invite(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
students_can_not_create_groups_after=timezone.now() - timedelta(days=1)
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_postrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertIn(
'Creating project groups without administrator approval is not '
'allowed on this assignment anymore. Please contact your course '
'administrator if you think this is wrong.',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertFalse(mockresponse.selector.exists('form'))
def test_accept_invite_students_can_not_create_groups(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
test_assignment.students_can_create_groups = False
test_assignment.save()
mockresponse = self.mock_http200_postrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertIn(
'This assignment does not allow students to form project groups on their own.',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertTrue(AssignmentGroup.objects.filter(id=group1.id).exists())
self.assertFalse(mockresponse.selector.exists('form'))
def test_get_student_can_no_longer_invite(self):
test_assignment = baker.make(
'core.Assignment',
students_can_create_groups=True,
students_can_not_create_groups_after=timezone.now() - timedelta(days=1)
)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertIn(
'Creating project groups without administrator approval is not '
'allowed on this assignment anymore. Please contact your course '
'administrator if you think this is wrong.',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertFalse(mockresponse.selector.exists('form'))
def test_get_students_can_not_create_groups(self):
test_assignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=test_assignment)
group1 = baker.make('core.AssignmentGroup', parentnode=test_assignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
test_assignment.students_can_create_groups = False
test_assignment.save()
mockresponse = self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'accept_invite': ''
}
}
)
self.assertIn(
'This assignment does not allow students to form project groups on their own.',
mockresponse.selector.one('.alert.alert-danger').alltext_normalized
)
self.assertFalse(mockresponse.selector.exists('form'))
def test_get_num_queries(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
with self.assertNumQueries(3):
self.mock_http200_getrequest_htmls(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
}
)
def test_post_decline_num_queries(self):
testassignment = baker.make('core.Assignment', students_can_create_groups=True)
group = baker.make('core.AssignmentGroup', parentnode=testassignment)
group1 = baker.make('core.AssignmentGroup', parentnode=testassignment)
candidate = core_baker.candidate(group=group, fullname="April Duck", shortname="april@example.com")
candidate1 = core_baker.candidate(group=group1, fullname="Dewey Duck", shortname="dewey@example.com")
invite = baker.make('core.GroupInvite', group=group,
sent_by=candidate.relatedstudent.user, sent_to=candidate1.relatedstudent.user)
with self.assertNumQueries(13):
self.mock_http302_postrequest(
requestuser=candidate1.relatedstudent.user,
viewkwargs={
'invite_id': invite.id
},
requestkwargs={
'data': {
'decline_invite': ''
}
}
)
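# Taken together, the assertions above pin down the invite semantics under
# test (a summary of the asserted behaviour, not application API code):
# accepting an invite merges the invited student into the sender's group and
# deletes the invitee's now-empty group (candidate_count becomes 2), while
# declining leaves both single-member groups untouched.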
| 53.035907
| 122
| 0.662413
| 8,804
| 88,623
| 6.474103
| 0.030214
| 0.043107
| 0.062265
| 0.063757
| 0.966139
| 0.963507
| 0.950297
| 0.942787
| 0.934945
| 0.929331
| 0
| 0.013305
| 0.235898
| 88,623
| 1,670
| 123
| 53.067665
| 0.828404
| 0
| 0
| 0.778699
| 0
| 0
| 0.14705
| 0.00501
| 0
| 0
| 0
| 0
| 0.079082
| 1
| 0.054847
| false
| 0
| 0.008291
| 0
| 0.072704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
57e5c97d5a250431679849afaa5de89ed20c7cbf
| 13,511
|
py
|
Python
|
MISSIONS/air_fight/environment/world/replay.py
|
Harold0/hmp
|
4745e1d3e56c7f08947c839526e6827daa3e6048
|
[
"MIT"
] | null | null | null |
MISSIONS/air_fight/environment/world/replay.py
|
Harold0/hmp
|
4745e1d3e56c7f08947c839526e6827daa3e6048
|
[
"MIT"
] | null | null | null |
MISSIONS/air_fight/environment/world/replay.py
|
Harold0/hmp
|
4745e1d3e56c7f08947c839526e6827daa3e6048
|
[
"MIT"
] | null | null | null |
__pyarmor__(__name__, __file__, b'\xe7\x50\x8c\x64\x26\x42\xd6\x01\xb9\x4e\x3f\xdf\x40\x98\x75\xef\xac\x7b\x63\x36\xc5\x93\x0f\xa5\x45\x39\x8b\x1e\xb7\xc8\xd4\xc3\x28\x3b\x32\xff\xee\x98\x64\x3b\xf3\x1d\x3b\xae\x8d\x5d\xb0\xac\x1e\xaa\xd2\x8f\x55\xd6\x62\x54\xc9\x2a\x5f\x02\x13\x06\xcd\x3b\x46\xa5\x35\xfd\xa7\xb1\x55\xc8\xdc\x32\x34\x46\x48\x94\x19\x21\x59\x86\x84\x12\xd1\xe7\x66\x77\x0d\x91\xa9\x8d\xd1\x25\x6c\x2d\xec\x4e\xc8\x16\xf2\xc8\x8d\xbb\x84\xa7\x36\x43\xd1\xe7\x9c\xbd\xd7\x47\xa0\x49\x75\x2a\x7d\x61\x78\x4e\x58\x1e\x60\xb7\x36\xbc\x34\x4e\x7d\xf8\x1a\x1b\xf2\x99\xf4\x93\x8e\x2a\x70\x46\x70\x75\x59\x04\xdd\xef\x13\x9d\x9d\xd6\xfa\x5b\xee\xe2\xb8\xae\xc3\xd6\x3a\x6f\x68\x54\x3c\x37\xd8\x54\xce\xb8\x3c\x08\x11\xc3\x66\xea\x45\x33\x06\x17\x01\x01\xe6\xfa\xfb\x9a\xdc\x81\x14\xa9\xd7\x53\xb6\x71\x91\x86\x9d\x21\xab\xc3\xf4\x5e\xfd\x59\x25\xc7\xaf\x1a\x44\xf2\x5d\x3b\x76\x29\x05\x8b\x7c\xb6\x9e\x71\x0f\xd9\x63\x93\x0c\x82\x0b\x59\xab\x0d\x41\x03\x69\x3f\x9b\xa2\x4a\x34\xa2\x52\x32\xb9\x34\xc0\x81\xde\xd6\x00\xfc\xac\x52\x9f\x24\xbe\x7a\x3e\x65\xf5\x36\x3c\x44\xe6\xb1\xda\x95\xfd\x0a\x6f\xee\xc1\x6b\x47\x47\xd4\x9f\x63\xb5\x60\xe6\x0e\x0e\x7f\x0a\x07\x3d\x60\xa5\xcf\x21\x61\xb3\xca\xc8\x81\xc9\x43\xa1\xeb\xd8\x5b\xf1\x53\x73\x5f\x37\x1b\x73\xa8\x7f\xc9\xc5\xa7\x0c\x62\xac\xc7\xad\x15\x43\xae\x16\x1a\xfe\x37\xd3\x85\x72\xea\xed\x83\x98\x41\x0a\x4e\xfe\xc1\xd5\x33\x4a\x37\x70\xb2\x64\x89\xa8\xb9\xf6\xb2\x00\x63\x8c\x1e\x40\x1a\xce\x18\xbd\x8c\xf3\xa1\x60\x85\x76\xe7\xae\x84\xc8\x23\x74\x56\x8f\x79\x49\xea\x9d\xd9\x3c\xb4\x11\x82\xf3\xf1\x56\xe4\x5b\xa2\x4c\xcc\x1e\xec\x6e\xf6\x68\x39\x91\xdb\xd9\xc0\x44\x5a\xe6\x96\x3f\x6c\xf6\x42\xe2\x60\xb9\x15\xbf\xdf\x4b\x58\x91\x9b\xa5\xb4\x08\x92\x34\x11\x09\xfe\x31\x8b\x54\x85\xee\x1d\xf1\xda\x89\x08\x53\x58\xb0\xc9\x20\x07\x35\xe2\xd8\xb4\x3c\x76\xcc\x84\xee\x32\xc6\xe2\xc3\x9b\xe9\xb1\xe0\x2f\x4a\x53\x1e\xa7\xce\x6f\xfe\x89\x2a\x18\xbb\xf2\xcc\x68\x10\xd5\xea\xfe\x15\x43\x4c\xa7\xdb\x85\x24\x29\xda\xa6\x6b\x26\x64\xe4\xa3\x02\xb5\x13\xdb\x9f\x43\x51\x49\xa6\xb4\xe8\x9c\x4e\x2d\x73\xa1\x15\xbc\x8f\x31\xc6\x53\x4f\x88\x4a\xef\xf3\x8a\xd4\x16\xa2\x87\x68\xbd\x25\xea\x93\xea\x75\xe6\xa5\x1d\x2e\x1a\x45\x09\xba\x70\xaa\xfb\x94\x48\x66\x83\x73\x57\xa0\xca\xec\xa4\x16\x11\x5d\x58\x01\x55\xea\xd0\x52\x3c\x69\xea\xcb\xa5\xc7\x92\xe6\x99\x26\x6f\xd2\x5f\xf1\x56\x9c\xcb\x26\xf2\x7c\x3e\x9d\x01\xff\x7b\x40\xd7\x20\xd0\x03\x94\x71\x43\x3b\x11\x64\x27\x37\xa2\x76\x0e\x5b\x3f\xc3\xd1\xba\x06\x5e\xe1\x43\x2e\x73\x2e\xbe\x0c\xcb\x21\xff\xbc\x06\xa0\xf3\x4b\xb3\x08\xb0\x1c\xf2\xde\xc1\x60\x1d\xc2\x61\x72\xfd\x15\x47\x6c\xe3\x2e\x46\xc9\xc5\x40\x32\x4a\x3e\x2b\xed\xc3\xa1\x19\x59\x35\xa0\x8f\x29\x5e\x79\x05\x42\xaa\x4b\x4f\xe2\x6b\x6f\x39\x96\xb0\xad\x8f\x7a\xee\x77\x53\xf5\x0e\xfc\x0d\x07\x8b\xd9\x4c\x38\x1c\xa6\xf1\x36\x5b\xcc\xeb\x61\xfb\x15\xbc\x22\xba\x84\x89\xa8\x40\x3b\x92\x01\xea\xe4\x21\x37\x42\x28\x6e\xe4\x20\x59\xbc\xf0\xef\xee\x9c\xe9\x96\x4e\xc6\x18\x94\x95\x50\xf2\xf3\x1f\x3e\xad\xa7\xca\x06\x6a\x0a\x0f\x99\x2d\x48\xe9\x82\xaa\x05\x82\xa8\xb4\xd6\xb7\x50\x7d\xa3\xed\xa1\x59\xa2\x44\x8c\x26\xc0\xf4\x0f\xfd\x75\xd5\x01\xd6\x42\x17\xce\x8c\xfd\xf3\x80\xb9\x82\xff\x17\x13\x33\xd6\xef\x0b\xa7\x95\x3d\x4a\xe1\xee\x57\xb0\x96\x1c\xc6\x2d\xac\x1c\xcb\xd1\x2d\xca\xa6\x4d\x72\x78\xf1\x01\x48\xce\xea\xf6\x89\x6b\x3b\x73\xb4\x15\x39\x00\xb5\x64\x6f\x3d\x62\x0a\x4e\x1c\x1f\xa0\x3e\xb6\x7b\xfc\x0c\xe8\xc3\x5d\xa5\xd4\xdf\x42\xa4\x0e\x28\xec\xfc\x4f\x73\x1a\x79\x81\xe7\xc9\x79\xe0\x4e\x58\x64\x87\x2b\xd5\x97\x99\xde\x71\xd3\x8a\xdb\x40\x0a\x1c\xed\xa0\xc7\xe7\x4e\x5c\
x02\x7d\xca\x9d\xc5\x41\xe5\x09\x05\xbc\x14\x0c\xac\xb6\xf7\x4e\xaf\xd7\xcf\xc4\x31\xc3\x18\x92\x71\x16\x77\x5e\xe6\x79\xdd\x5d\x7a\x29\xcf\x80\x3b\x04\xf4\x07\x96\x88\xcd\xf0\x5e\x85\x07\x0e\x92\xdb\x99\xc0\xf6\x13\x16\x37\x38\xe1\xa0\xa3\xab\xd2\x19\x2d\xd4\x58\x7d\xa7\xd7\xe7\xb3\xd5\xe8\x64\x2b\xa7\x8e\xd9\xab\xe8\xaf\xd7\x90\x6c\x9f\xfa\x1f\xce\xb0\x8a\x16\x63\xab\xbd\xd5\xa6\xcf\x65\x88\x83\x43\x2a\x3d\x3b\x29\x9c\x4e\xa4\x28\xb1\x83\x45\xc9\x15\x06\xd3\x11\x39\x7b\xb7\x06\x51\xd3\x84\xdd\x49\x73\xdf\xf0\x98\x04\xc4\xb1\x58\xf1\xdb\x2a\x51\xc6\x78\xed\x6d\x8b\x11\x9b\x36\x09\x2b\x8d\xe2\x5f\x0f\x6b\x20\x54\x31\xe4\x1b\xea\x54\x1c\x48\x17\x11\x74\x42\x9c\xa8\x1b\xe7\x64\x1c\xcf\xd1\xd6\x3f\x7d\x9e\xd3\xf6\xd1\xab\xff\x3f\x94\xaf\x8d\x14\x5b\x18\x8c\x0c\xd3\xa2\x12\x60\x7c\x94\x85\x70\x5a\xc8\xbb\xd4\x43\x51\x31\xdc\x7b\xad\x30\xc7\x71\xde\x14\x82\xd5\xfb\xb9\xe2\x2f\x53\xee\x95\x98\x0a\x23\xdf\xc5\xcb\x46\xa4\xd6\xe8\x00\x52\xa3\xaa\x82\x74\x38\x66\xc8\xc0\x5f\x49\xbd\xad\x6b\x63\xda\x2d\xb9\xb6\x05\x78\x8b\x75\x5a\xf0\x0d\xe9\x37\x81\x0e\xe4\xc0\x2e\x32\x3a\x0f\x3b\xd6\x3a\x2f\xc0\x4d\xdf\xf2\x5c\xcb\x92\x4f\x7c\x12\xe5\x0b\x40\x9f\x41\xcb\x38\x0b\x67\x5e\x55\xd0\xa1\x43\x71\xb0\xc9\x5e\xeb\x25\xe4\x99\xf5\xc8\x5a\x6d\xf9\xb0\x0d\x5b\x11\xf7\x54\x88\x2d\xf3\xfd\xff\xbf\x60\xdc\xc5\xc2\xc7\x5e\xf2\x42\x85\x55\xde\x72\xf7\x2f\xff\x49\xb4\x6f\xa7\x28\x74\xf6\xed\x99\x91\x4a\x00\xf1\x5d\x1d\x5c\x80\x5b\x2f\x02\x50\xcf\x5e\x40\x04\xdf\x54\xc2\xd7\x43\xfb\x46\x9c\x16\x32\xbd\xfa\x46\xdd\x2b\xba\x85\x09\x2f\xb5\xad\x0f\x32\x05\x0f\x3a\xf9\x44\xe0\x6a\x5e\x43\x1b\xd5\xc7\x52\xdf\xd9\xfd\xa5\x1d\x46\x74\x7a\x13\x0e\x53\x44\x5c\xc0\xbb\x68\x12\x96\x07\x86\xf4\xad\xa1\xc3\x10\x1d\x0f\x37\x0c\x95\xd1\x86\x47\x1a\xe4\x02\x5a\xc1\x04\xea\xb0\x25\x08\x24\x2d\xc7\xa6\x70\xe6\x41\x2f\xfd\x24\x59\xfe\x8b\x7d\x9b\x98\xe5\xc8\x8e\x25\x2f\xaf\x40\xed\x4a\xac\x2d\xb5\x19\xa2\xbc\x19\xae\x2d\xd1\x48\x48\x2e\x61\x03\x05\xcd\x69\x16\x94\xc4\xc0\x44\x8e\x76\xc1\x6f\x0b\x5d\x95\x68\x03\x8e\x99\xd0\xca\xfc\x90\xa4\x97\x66\x64\xc8\x30\x15\x6b\xd4\x2b\x43\xf5\x15\xb2\x54\x80\xd1\xad\x4c\x81\x92\xfc\xf0\x39\xd5\xd4\x29\x68\x62\x1c\x80\xac\xc5\xbd\x25\xad\x41\x43\x8f\x8d\xc0\xba\xec\xaf\x46\x2c\x70\x42\x76\x27\x65\x86\x3d\x22\x8c\x1d\x70\xf9\xb9\x41\x41\x01\x74\x30\x42\xb0\xeb\x8f\xd7\xa3\x43\x08\xa6\x63\x02\xfa\x5b\x6d\x75\xbe\x0f\x9c\x26\x13\x51\x84\xd5\x49\x22\x1d\xd9\x67\xbc\x86\xba\x9d\x0b\x7a\x44\xc3\x70\x76\x31\xc0\x88\x01\xa2\x5d\x3b\xa8\x4b\xee\x9e\x0a\xa0\xd6\x59\xd9\x1e\x06\x8e\x41\xf0\x7f\x3d\xe1\xe6\x5d\xf3\x3e\x10\xba\xd4\xd8\x38\x87\xc7\x74\xdc\x7c\xc6\xde\x05\xb2\xd8\x26\x07\x68\x30\x3a\x03\xd7\x3e\xea\x98\x24\x49\x4c\x62\x2a\x9d\x89\x2f\x8c\x00\xc3\xd0\xab\x36\x10\xf0\x48\x50\xf3\x91\xdc\x0c\x05\x1e\x85\xa9\x56\x5d\x60\x8c\x6a\xe2\xe3\xc5\x55\xc6\xc9\x52\x44\xd9\x34\x71\x46\x7b\x92\x28\x67\x2e\x86\x25\x88\x63\x52\xe9\x09\xa5\x2a\x94\xb8\xd4\x4c\xa3\x36\x7c\x9e\x7b\xc9\x71\x50\x5e\x41\xf1\xef\x3c\xba\x39\x7a\xdb\xb4\xde\xf4\x08\x8b\x3a\x4c\xf8\x75\x75\x11\x41\xef\xc6\x06\x04\x99\x9f\x72\x4d\xaf\xe0\xa0\xc7\xe6\xdb\x9b\x4f\x46\xf8\x3e\x06\xd6\x73\xda\x49\x25\xaa\xb7\x1f\x42\xb4\xb4\x79\xa0\x51\x2e\x83\x82\xeb\x96\x3e\x21\x0f\x50\xb9\xc6\x30\xa1\x2e\xa0\x6c\x11\xb3\xa3\x9d\x35\xc6\x11\xe8\xa9\x06\xa3\x9c\x4a\xbe\x08\xca\x10\xf0\xb2\x41\x4c\x89\xf6\xec\x84\x34\x80\x53\xa1\xd4\xca\x5f\x70\xec\xa7\xae\xa2\xce\xee\x71\x21\x16\x8d\x03\x2d\x5f\xaf\x08\xfe\x1a\x23\xee\xfb\x15\x31\xb1\x2d\x25\xdf\x90\xc6\xb8\x87\x6a\x4f\x0b\xcf\xb5\xcb\xe7\x2d\x46\x0a\xbe\xb0\x65\xe6\x94\x16\x00\x0d\xe4\x75\x55\x08\xca\xc1\x96
\xab\xc1\x27\xc3\x10\x50\x59\x56\x83\x92\xce\x01\xeb\x48\x8b\x55\x35\x93\xca\xb9\x10\x38\x78\x38\xde\xee\x79\x6d\x06\x8b\xc5\xb0\x9a\x3c\x6d\x39\xef\xb8\xc7\xe3\x78\xd0\x30\xc8\xdc\x7c\x50\x66\xf8\x1a\xfc\xc6\x37\xea\x12\x9f\x43\x1c\x3d\x99\x15\xc5\xd5\xd0\xfb\x37\x24\x94\x00\xc8\x1a\xa8\x09\x72\xe1\xc2\x1e\xd3\x08\xe2\xc2\x7e\x35\xf2\xad\xba\xf7\x09\xac\x76\xce\xde\x98\x2f\x53\x05\x30\x83\x14\xe0\x7e\x5f\x80\xc2\x36\x15\x8d\xbd\xf5\x9a\x60\x09\xc8\xc0\xcb\xb9\x72\x47\x32\x8c\xaf\x3b\xc7\x5b\xef\x3a\xf3\xf9\x7a\x49\x27\x3a\x98\x9d\xba\x3b\x11\xae\xac\x03\x7b\x2f\x5c\x78\x3b\x97\x33\xd6\x5d\x6d\x0f\xd5\xde\x2c\x27\x8c\x0f\xd8\x23\xc9\x2b\x2a\x5e\x7c\x0a\x82\xd5\xb7\x15\x31\x04\x45\x66\x6d\x58\x0c\xf0\xc4\xf5\xc1\x1b\xad\x02\xc2\x24\xe4\xca\x1f\xc6\x1c\xec\x66\xc8\xad\xea\x5b\x8b\xc4\x1c\x80\x76\x19\x66\xb4\x75\x1a\x16\x04\x11\x25\x4f\x91\x55\xf4\x3a\x92\x97\x5b\x02\xf6\x14\xde\x9f\x0d\xdd\x7a\xe1\x96\xed\x1e\xae\xc8\x9c\x3d\x11\x77\xc1\x52\x2d\xa4\x72\x3d\x6b\x46\x91\xc2\xa8\x8b\x83\xf2\x2f\xcb\xb7\xc6\x3f\xbe\x1e\xab\x3c\x07\xdf\x7e\x7e\xec\x80\x48\x01\x1b\xde\x03\xd2\xf4\xaa\xbe\x53\x08\x21\xb5\xaf\x53\x89\xb2\x48\x9b\xe4\x38\x38\xfe\x73\xa7\xee\xf8\x70\xec\xaa\x83\x76\xa1\x8d\xc9\x34\x4e\x74\xe7\x74\xbb\x83\x15\x27\x4a\x35\x25\x4d\x68\x0e\xa0\xdd\x44\x40\x75\x7c\x3b\xaf\x28\xac\xe0\xfd\xaa\x61\x98\xb2\x8e\xfa\x8c\x91\x95\x64\xcd\xe9\x7c\x3a\x6f\x83\x71\xb8\xb9\xf0\xce\xc3\x50\xc7\x02\xfe\xe3\x4c\xab\x50\xa0\xd9\x9c\x51\xba\xb2\x32\x0f\x1f\x9c\xff\x64\x86\xaf\x32\x97\x03\xae\x81\x19\x48\x4c\x60\x6f\x56\x39\x62\xfa\x4d\x92\x87\x21\x59\x31\xab\x35\x59\x17\x1c\xaa\xc0\x53\x37\xe7\x4f\x5a\xb2\xf7\x8d\x97\x7d\xfd\x7c\xd2\xeb\xc1\xb3\x8f\x94\x0b\x1e\xc3\x85\x0a\x5d\xa4\x6d\xd7\x5a\x2f\xed\x12\x96\x77\xa1\xb5\xda\x8d\xa6\x79\x6f\x04\x1d\x1c\x0a\xdd\x6a\x52\x31\x65\x83\x0a\xa7\x55\x24\x48\xcb\xdf\xb9\xa8\x06\x69\xeb\x26\xc9\x43\x6b\x3c\xe4\x19\x1c\x63\xe3\x34\x68\x51\x47\x8e\xdf\xcf\x78\xbc\x3e\x96\x9e\x04\x4b\xee\xa9\x5b\x96\x8c\xa8\x31\xd0\x9f\xcd\x06\xfc\x0f\x7b\x27\x73\xd4\x44\xe0\xb7\x66\x12\x58\x24\xbf\x44\x70\x74\xac\x5c\x5e\xd7\xfb\xc9\xc9\xa1\xb6\xab\x0a\x09\x62\x97\x2c\x01\x13\x94\xaf\x3b\xf1\xd4\x37\x87\xce\x13\x3b\x86\x97\xd7\x9d\xe5\xb1\xdb\xbc\xeb\x7c\x08\xc3\x64\xf2\x29\x2a\x59\xcf\x5b\x50\x16\xd0\x1e\x8a\x2e\xd0\x1b\xe0\x0e\xd8\xaa\x12\x81\xa3\x3b\xfa\xbe\x91\xb3\x9e\x44\x14\xa1\xaf\x12\x77\x61\x67\x88\x6a\x41\xe0\xa7\xc0\x6a\x35\x7b\x70\xe9\xbb\xb7\x96\xa8\x86\x39\x44\x86\xce\x8b\xa0\x08\x62\x1b\x64\xda\x29\x00\x69\x64\xb4\x45\x43\x4c\xa7\xd0\x86\xd2\xca\xf6\x7b\x7d\x48\xd1\x47\x25\x1f\xbc\x5b\x36\x2c\xb0\x4a\xb8\x8e\x5c\x8b\x86\xba\x71\xbe\xe3\xb3\xa2\xd8\x3c\xff\x85\x39\x75\x8f\x73\x23\xc3\x1c\x00\xb3\xda\xab\x7e\x54\x26\xbc\x7e\x35\x96\x98\xc9\xcc\xca\x26\x2f\x53\x62\xe2\x05\x5c\x2c\x11\x08\x39\xab\x80\x15\xc1\xcf\x78\xaa\x92\xc8\x7a\xc3\xc8\x88\x66\xe6\x91\x96\x21\x7a\xe5\x81\xea\x47\x70\x81\x56\xf6\xc8\xe5\x10\x5c\xb0\x83\xac\xc4\x09\x4f\xea\xb3\x91\x14\x5a\xbb\x7a\x58\x1d\xde\xcd\x28\xe2\xd1\x3b\x3e\xd4\x7f\x85\x8e\x0d\xdd\x94\x9f\xfd\xbf\x81\x2e\x4f\x3c\xd5\xc2\xc4\x4f\x83\xb7\x7e\xc2\x33\x4b\xf5\x1f\xa0\x10\x28\x89\xb7\x0f\x7a\x9e\x25\xea\xb9\xff\x3a\xbf\x5f\x65\x4a\x98\x52\xac\x9c\xc1\x35\x1c\x56\x53\x6b\x8a\x83\x9e\xb4\xb0\x36\x43\x78\x19\xac\x6c\x76\x47\xb6\xec\x38\xf6\x4e\xb7\xa5\x2a\x81\x8d\x6f\x33\x92\x77\x96\x25\xb2\x85\x60\xe2\x73\x41\xcd\xc9\x5a\x25\xe1\x22\x16\x0a\x70\x25\x62\x4f\x81\xda\xcc\xd5\x4e\xe5\x4a\x89\x22\xd7\xb6\xbc\x07\x37\xbb\xe1\x31\x3c\x8c\x25\x88\xdb\x96\xe8\x19\xda\x26\xbb\x2c\x02\x63\x2b\x70\x5e\xd0\x9c\x35\xec\xc7\x14\xd8\x1e\x15\xd4\xea\xc9\xe
a\x21\xc4\x84\xe2\xa2\x62\xf7\xf7\x16\x53\x7c\xd1\xdc\x18\x00\x7d\x9d\xc5\x75\x61\x7e\x03\x19\x0f\x09\x1b\xb3\xec\x76\x6b\x8a\xce\x61\x36\x06\x04\x09\x33\x8e\xcd\x0c\xd1\x38\x8c\xea\x86\x07\xb5\x09\x3c\x39\x69\xe6\xc7\x6a\xd4\x40\xac\x88\x3c\x5b\x1e\x23\xd1\x82\x6c\x20\xdc\x03\x44\x2f\xbd\xf1\x06\x2f\x23\xd8\xc5\xd5\x40\x22\x18\x33\xdb\x3f\xd5\x3e\xa9\x46\x09\x91\x49\xc2\x87\xdf\x96\xa1\x2b\x72\x21\xe7\xb6\xc4\xf4\x25\x8e\x91\x4d\x01\xa2\x7d\x59\x8e\x1f\x4a\x5b\x07\x29\x36\x1d\x65\x56\xcb\x08\x5d\x8a\x5d\xa9\x15\xd4\xe1\x0a\x2d\x8f\x12\xea\x2e\x26\xa4\x18\xd7\x0b\xdd\x76\xf1\xda\x7e\x82\x93\x0f\x0e\x13\x1c\xf8\x19\xe1\x2b\x29\x0c\x2a\xd3\x77\x6c\x24\x3d\x56\xa8\x4d\xdc\xaa\x8e\xf8\x8e\xf7\x46\x11\x97\xc3\x9e\xbb\x42\x7e\x7d\xa2\x32\x5e\x29\xb2\x78\xcc\x31\x0e\x2d\xd8\xde\x55\xd3\x99\x93\x48\x8b\x70\x25\xef\x8e\x00\xde\x06\x7c\x86\x21\x03\xb9\x48\x95\xf3\xc4\x57\xc1\xba\xdb\x06\x37\xca\xd0\x1d\x88\x8e\xb5\x83\xd1\x5e\x6b\xc2\x30\xa7\xe9\x3e\x70\x39\x60\x57\x7e\x48\x2a\x49\x86\x92\xc1\xdc\x32\xe1\x4b\x24\x75\xdb\x93\x0a\xc4\x50\x71\xb8\xa8\x6c\xae\xd3\xa8\x30\x64\xbc\x09\x2c\xc0\x3b\x31\x33\x1b\x35\x5d\x9c\x17\xe4\xeb\x4d\xaa\x96\x3e\xc8\x26\x4d\x00\x21\x85\xec\xb0\xcd\x75\xda\xee\x06\x9c\x25\x55\x13\x5e\x20\x5a\xf7\x98\x96\x32\xb6\xfa\xfd\x8a\xbb\x6c\x09\x03\x54\x99\x06\xd1\x89\x23\xea\xd8\xb1\x6d\xc4\xa4\xb2\x89\xe6\xde\xe9\x68\xbf\xd2\x24\xa9\xd2\x51\xcd\xe5\xb0\x4f\x8c\x8c\xae\x34\x30\xa4\xa0\x57\xa2\xab\x7c\xc5\x0e\x73\x34\x38\x54\x7f\x87\x39\x71\x14\x13\xe1\xce\x76\x47\x31\x75\xc9\x61\xae\xab\x12\xe0\x9d\xc5\x57\x1c\xe7\x51\xcf\x33\xc0\x3a\x0d\x9b\xcd\xb0\xb1\xf8\xe5\xfc\x41\x49\xdc\x9b\xe1\xcd\x34\x06\x22\x03\x3c\x3d\x6d\x13\x93\x8d\x18\x7d\x62\x1d\x6d\x77\x91\x19\x34\xcd\x6b\x1d\xb8\xfe\xf0\xd7\xb5\x02\xc2\x0e\x77\xc1\x31\x4d\xbb\xc8\x3f\x1f\xd9\x18\x0d\xd8\x3c\x19\x1b\xbe\xf1\x18\x8b\x80\x41\x5b\xe5\x10\xd3\xc7\x79\xfa\xb4\x1e\x66\x9c\xed\xba\x85\x3f\xa9\x18\x71\xad\x1b\xa1\x79\x25\x2a\x4a\xb3\xe2\x1e\x1d\x48\xa1\x5f\x7d\x5d\x76\x02\x74\xb7\x3e\xc1\x7a\xa3\x1c\xd0\xba\x19\xe7\x7f\x37\xb3\x44\xde\xfa\xe3\xf7\x0d\x1b\x9d\x1d\x41\x65\x85\xbe\x93\xd0\x3e\xf6\x8f\x19\x85\xb4\x62\x12\x1e\x7f\x53\xe0\x55\x5a\x37\x7e\xe6\xee\x8d\x53\x2d\x36\x17\x5f\x0e\x32\x8c\x68\x70\x2d\x7b\x43\x58\x37\x40\x5b\xef\x65\xab\x53\xf1\xf1\xda\x8e\xe7\x47\x1e\xf2\xd4\x2f\xdf\x82\xcd\x44\xca\xad\x2c\xbe\xf7\xf2\x0d\xb8\x1c\xaf\xbc\xb1\xc3\x88\x85\x2e\xb5\xbe\x48\x44\x86\x30\x83\x23\x89\x65\x93\xe6\x24\xff\x7a\x0b\x74\xdf\x86\x09\x83\x10\x34\x6d\x25\xc9\xc9\x2d\x5b\xa8\xe0\xde\x7e\xf2\x39\x44\xb3\xe8\x97\x47\xe4\x25\x56\x7d\x0b\x4f\x49\x7f\x4b\x2f\x6b\x7e\xa2\x5b\x47\x86\xe6\xc9\x03\x69\xad\xf3\x09\x41\x4d\x2b\x71\x6c\x92\xd9\x7e\xd8\x24\x01\x9f\xd1\xff\x88\x27\x9b\x9a\xfb\xbb\x44\x1b\x48\x39\x46\xb0\x6d\x7d\x02\xc9\xc5\xba\x10\x4d\xe5\x45\x65\x05\x5a\x84\x22\x12\x97\x70\x86\xd5\x51\x5d\xec\xf8\x2f\x71\xa6\x73\x53', 1)
| 13,511
| 13,511
| 0.749981
| 3,373
| 13,511
| 3.000593
| 0.077379
| 0.001778
| 0.001778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.312111
| 0.000222
| 13,511
| 1
| 13,511
| 13,511
| 0.437148
| 0
| 0
| 0
| 0
| 1
| 0.99704
| 0.99704
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
17b1050e7a459a10d0ed98c5d23a04d8637d0c93
| 9,157
|
py
|
Python
|
unit_tests/test_std_replay_data.py
|
abraker-osu/osu_analyzer
|
f930b1e75d1c4c973dfa49fdab2afedb2a432e31
|
[
"MIT"
] | null | null | null |
unit_tests/test_std_replay_data.py
|
abraker-osu/osu_analyzer
|
f930b1e75d1c4c973dfa49fdab2afedb2a432e31
|
[
"MIT"
] | null | null | null |
unit_tests/test_std_replay_data.py
|
abraker-osu/osu_analyzer
|
f930b1e75d1c4c973dfa49fdab2afedb2a432e31
|
[
"MIT"
] | null | null | null |
import unittest
from replay_reader import ReplayIO
from analysis.std.replay_data import StdReplayData
class TestStdReplayData(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def test_get_replay_data(self):
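# Smoke test: parsing each replay below should simply not raise;
# the returned replay_data is intentionally left unasserted.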
replay = ReplayIO.open_replay('unit_tests/replays/osu/LeaF - I (Maddy) [Terror] replay_0.osr')
replay_data = StdReplayData.get_replay_data(replay)
replay = ReplayIO.open_replay('unit_tests/replays/osu/osu! - perfect_test [score_test] (2019-06-07) Osu.osr')
replay_data = StdReplayData.get_replay_data(replay)
replay = ReplayIO.open_replay('unit_tests/replays/osu/Toy - Within Temptation - The Unforgiving [Marathon] (2018-02-06) Osu.osr')
replay_data = StdReplayData.get_replay_data(replay)
def test_press_times(self):
replay = ReplayIO.open_replay('unit_tests/replays/osu/osu! - perfect_test [score_test] (2019-06-07) Osu.osr')
replay_data = StdReplayData.get_replay_data(replay)
press_times = StdReplayData.press_times(replay_data)
self.assertEqual(len(press_times), 11)
def test_release_times(self):
replay = ReplayIO.open_replay('unit_tests/replays/osu/osu! - perfect_test [score_test] (2019-06-07) Osu.osr')
replay_data = StdReplayData.get_replay_data(replay)
release_times = StdReplayData.release_times(replay_data)
self.assertEqual(len(release_times), 11)
def test_get_key_state(self):
# Shorthand
FREE = StdReplayData.FREE
PRESS = StdReplayData.PRESS
HOLD = StdReplayData.HOLD
RELEASE = StdReplayData.RELEASE
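# Note: __get_key_state has a double-underscore prefix, so Python name-mangles
# it; the tests must reach it as StdReplayData._StdReplayData__get_key_state.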
# free -> all free
key_state = StdReplayData._StdReplayData__get_key_state(FREE, [ FREE, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, FREE)
# TODO: this is an illegal state (you can't have a press transition into a free, it must go to a release first)
#key_state = StdReplayData._StdReplayData__get_key_state(PRESS, [ FREE, FREE, FREE, FREE ], press_block=False, release_block=False)
#self.assertEqual(key_state, )
# TODO: this is an illegal state (you can't have a hold transition into a free, it must go to a release first)
#key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ FREE, FREE, FREE, FREE ], press_block=False, release_block=False)
#self.assertEqual(key_state, )
# release -> all free
key_state = StdReplayData._StdReplayData__get_key_state(RELEASE, [ FREE, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, FREE)
# free -> one press
key_state = StdReplayData._StdReplayData__get_key_state(FREE, [ PRESS, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# press -> one press
key_state = StdReplayData._StdReplayData__get_key_state(PRESS, [ PRESS, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# hold -> one press (non blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# hold -> one press (blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, FREE, FREE, FREE ], press_block=True, release_block=False)
self.assertEqual(key_state, HOLD)
# TODO: this is an illegal state (you can't have a release transition into a press, it must go to a free first)
#key_state = StdReplayData._StdReplayData__get_key_state(RELEASE, [ PRESS, FREE, FREE, FREE ], press_block=False, release_block=False)
#self.assertEqual(key_state, )
# TODO: this is an illegal state (you can't have a free transition into a hold, it must go to a press first)
#key_state = StdReplayData._StdReplayData__get_key_state(FREE, [ HOLD, FREE, FREE, FREE ], press_block=False, release_block=False)
#self.assertEqual(key_state, )
# press -> one hold
key_state = StdReplayData._StdReplayData__get_key_state(PRESS, [ HOLD, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, HOLD)
# hold -> one hold
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ HOLD, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, HOLD)
# TODO: this is an illegal state (you can't have a release transition into a hold, it must go to press first)
#key_state = StdReplayData._StdReplayData__get_key_state(RELEASE, [ HOLD, FREE, FREE, FREE ], press_block=False, release_block=False)
#self.assertEqual(key_state, )
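# The TODOs above describe a per-key state machine. A minimal sketch of a
# validity check built only from those comments (a hypothetical helper for
# illustration, not part of StdReplayData):
#
#     ILLEGAL_TRANSITIONS = {
#         (PRESS, FREE),     # a press must go to a release before free
#         (HOLD, FREE),      # a hold must go to a release before free
#         (RELEASE, PRESS),  # a release must go to a free before a press
#         (RELEASE, HOLD),   # a release must go through press before hold
#         (FREE, HOLD),      # a free key must press before holding
#     }
#
#     def is_legal(prev_state, next_state):
#         return (prev_state, next_state) not in ILLEGAL_TRANSITIONS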
# press -> one release
key_state = StdReplayData._StdReplayData__get_key_state(PRESS, [ RELEASE, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, RELEASE)
# Test hold -> one release (non blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ RELEASE, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, RELEASE)
# hold -> one release (blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ RELEASE, FREE, FREE, FREE ], press_block=False, release_block=True)
self.assertEqual(key_state, RELEASE)
# release -> one release
key_state = StdReplayData._StdReplayData__get_key_state(RELEASE, [ RELEASE, FREE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, FREE)
# free -> one press, one release
key_state = StdReplayData._StdReplayData__get_key_state(FREE, [ PRESS, RELEASE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# press -> one press, one release
key_state = StdReplayData._StdReplayData__get_key_state(PRESS, [ PRESS, RELEASE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# hold -> one press, one release (non blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, RELEASE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# hold -> one press, one release (press blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, RELEASE, FREE, FREE ], press_block=True, release_block=False)
self.assertEqual(key_state, RELEASE)
# hold -> one press, one release (release blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, RELEASE, FREE, FREE ], press_block=False, release_block=True)
self.assertEqual(key_state, PRESS)
# hold -> one press, one release (press blocking, release blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, RELEASE, FREE, FREE ], press_block=True, release_block=True)
self.assertEqual(key_state, RELEASE)
# hold -> one hold, one release (non blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ HOLD, RELEASE, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, RELEASE)
# hold -> one hold, one release (blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ HOLD, RELEASE, FREE, FREE ], press_block=False, release_block=True)
self.assertEqual(key_state, HOLD)
# hold -> release, hold (blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ RELEASE, HOLD, FREE, FREE ], press_block=False, release_block=True)
self.assertEqual(key_state, HOLD)
# hold -> release, hold (not blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ RELEASE, HOLD, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, RELEASE)
# hold -> one hold, one press (not blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ HOLD, PRESS, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# hold -> one hold, one press (blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ HOLD, PRESS, FREE, FREE ], press_block=True, release_block=False)
self.assertEqual(key_state, HOLD)
# hold -> one press, one hold (not blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, HOLD, FREE, FREE ], press_block=False, release_block=False)
self.assertEqual(key_state, PRESS)
# hold -> one press, one hold (blocking)
key_state = StdReplayData._StdReplayData__get_key_state(HOLD, [ PRESS, HOLD, FREE, FREE ], press_block=True, release_block=False)
self.assertEqual(key_state, HOLD)
| 51.734463
| 143
| 0.701867
| 1,165
| 9,157
| 5.232618
| 0.070386
| 0.12336
| 0.057743
| 0.1729
| 0.909777
| 0.905348
| 0.894521
| 0.894521
| 0.878117
| 0.776411
| 0
| 0.00506
| 0.201485
| 9,157
| 176
| 144
| 52.028409
| 0.828638
| 0.246369
| 0
| 0.47619
| 0
| 0.047619
| 0.056139
| 0.019539
| 0
| 0
| 0
| 0.005682
| 0.333333
| 1
| 0.071429
| false
| 0.02381
| 0.035714
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aa1407ef26ba307a12aa88f928c9f29071c4075c
| 550
|
py
|
Python
|
eval_covid20cases_timm-regnetx_002_RandomCrop.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_RandomCrop.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_RandomCrop.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_0_RandomCrop.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_1_RandomCrop.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_2_RandomCrop.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_3_RandomCrop.yml",
"python main.py --configs configs/eval_covid20cases_unetplusplus_timm-regnetx_002_4_RandomCrop.yml",
]
for l in ls:
os.system(l)
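# The loop above shells out once per config file. An equivalent sketch using
# subprocess (an assumed alternative, not part of the original script), which
# avoids shell interpretation and stops on the first failing run:
#
#     import shlex
#     import subprocess
#
#     for l in ls:
#         subprocess.run(shlex.split(l), check=True)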
| 50
| 104
| 0.849091
| 80
| 550
| 5.4625
| 0.3
| 0.114416
| 0.1373
| 0.217391
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0
| 0.057803
| 0.056364
| 550
| 11
| 105
| 50
| 0.7842
| 0
| 0
| 0
| 0
| 0
| 0.880218
| 0.653358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a4c4c11b932748bc86132b7691c58eebc4c36ed1
| 21,441
|
py
|
Python
|
petroeval/plots.py
|
olawaleibrahim/petroeval
|
d52ed03f8d79377aa9ff0781fc670a0d5b1cbc5c
|
[
"MIT"
] | 17
|
2020-07-09T11:04:46.000Z
|
2021-09-21T17:56:33.000Z
|
petroeval/plots.py
|
gladaki/petroeval
|
561857e5ae37d14915643f54702a2e8d8300eb91
|
[
"MIT"
] | 4
|
2020-07-09T11:40:11.000Z
|
2021-08-19T23:05:01.000Z
|
petroeval/plots.py
|
gladaki/petroeval
|
561857e5ae37d14915643f54702a2e8d8300eb91
|
[
"MIT"
] | 13
|
2020-07-09T11:11:21.000Z
|
2021-08-23T13:44:06.000Z
|
'''
Module for log displays and visualizations
Functions
four_plot(logs, top, base, depth=False)
four_plots(logs, x1, x2, x3, x4, top, base, depth=False)
three_plots(logs, x1, x2, x3, top, base, depth=False)
two_plots(logs, x1, x2, top, base, depth=False)
two_plot(logs, x1, x2, top, base, depth=False, scale=False)
one_plot(logs, x1, top, base, depth=False)
make_facies_log_plot(logs, x1, x2, x3, x4, x5, Depth=False)
compare_plots(logs, x1, x2, x3, x4, x5, Depth=False)
'''
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.colors as colors
import matplotlib.pyplot as plt
from .utils import process
import numpy as np
import warnings
warnings.filterwarnings('ignore')
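# Usage sketch for the plotting helpers below (assumed workflow; the CSV path
# is hypothetical, but four_plot expects GR, NPHI, RHOB and RT columns for
# the default four-track display):
#
#     import pandas as pd
#     logs = pd.read_csv('well_logs.csv')
#     four_plot(logs, top=1000, base=2500)                      # fixed tracks
#     four_plots(logs, 'GR', 'NPHI', 'RHOB', 'RT', 1000, 2500)  # custom tracks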
def four_plot(logs, top, base, depth=False):
'''
Function to automatically plot well logs
Returns a plot of four logs(Gamma ray, Porosity, Density and Resistivity)
args::
logs: Dataframe object of well logs
depth: Set to false or leave as default to use dataframe index
Set to column title if column depth should be used
'''
logs = process(logs)
if depth == False:
logs['DEPTH'] = logs.index
logs = logs.reset_index(drop=True)
else:
depth = np.array(logs[depth])
logs = logs.reset_index(drop=True)
logs['DEPTH'] = depth
logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
try:
logs = logs.sort_values(by='DEPTH')
f, ax = plt.subplots(nrows=1, ncols=4, figsize=(12,10))
for i in range(len(ax)):
ax[i].set_ylim(top, base)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=4)
# Guard against unusable NPHI limits: `== np.nan` is always False, so use
# np.isnan/np.isinf, and fall back to 0.9 when the maximum is not finite.
nphi_max = logs.NPHI.max()
if np.isinf(nphi_max) or np.isnan(nphi_max):
nphi_max = 0.9
ax[0].plot(logs.GR, logs.DEPTH, color='black')
ax[1].plot(logs.NPHI, logs.DEPTH, color='c')
ax[2].plot(logs.RHOB, logs.DEPTH, color='blue')
ax[3].plot(logs.RT, logs.DEPTH, color='red')
ax[0].set_xlabel("GR (API)")
ax[0].set_xlim(logs.GR.min(), logs.GR.max())
ax[0].set_ylabel("Depth(ft)")
ax[0].set_title(f"Plot of Depth Against GR")
ax[1].set_xlabel("NPHI (v/v)")
ax[1].set_xlim(0, nphi_max)
ax[1].set_title(f"Plot of Depth Against Neutron Porosity")
ax[2].set_xlabel("RHOB (g/cm3)")
ax[2].set_xlim(logs.RHOB.min(),logs.RHOB.max())
ax[2].set_title(f"Plot of Depth Against Density")
ax[3].set_xlabel("RT (ohm.m)")
ax[3].set_xscale("log")
ax[3].set_xlim(logs.RT.min(), logs.RT.max())
ax[3].set_title(f"Plot of Depth Against Resistivity")
except NameError as err:
print(f'Depth column could not be located. {err}')
def four_plots(logs, x1, x2, x3, x4, top, base, depth=False):
'''
Function to automatically plot well logs
Returns
--------
plot of four logs(x1, x2, x3, x4)
Arguments
----------
logs: Dataframe object of well logs
depth: Set to false or leave as default to use dataframe index
Set to column title if column depth should be used
'''
logs = process(logs)
#Setting the value of the y axis. Using index or property specified
if depth == False:
logs['DEPTH'] = logs.index
logs = logs.reset_index(drop=True)
else:
depth = np.array(logs[depth])
logs = logs.reset_index(drop=True)
logs['DEPTH'] = depth
logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
try:
logs = logs.sort_values(by='DEPTH')
#top = logs.DEPTH.min()
#bot = logs.DEPTH.max()
f, ax = plt.subplots(nrows=1, ncols=4, figsize=(10,10))
for i in range(len(ax)):
ax[i].set_ylim(top, base)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=4)
ax[0].plot(logs[x1], logs.DEPTH, color='black')
ax[1].plot(logs[x2], logs.DEPTH, color='c')
ax[2].plot(logs[x3], logs.DEPTH, color='blue')
ax[3].plot(logs[x4], logs.DEPTH, color='red')
ax[0].set_xlabel(f"{x1} ")
if x1 == 'RT':
ax[0].set_xscale("log")
ax[0].set_xlim(logs[x1].min(), logs[x1].max())
ax[0].set_ylabel("Depth(ft)")
ax[0].set_title(f"Plot of Depth Against {x1}")
ax[1].set_xlabel(f"{x2} ")
if x2 == 'RT':
ax[1].set_xscale("log")
ax[1].set_xlim(logs[x2].min(),logs[x2].max())
ax[1].set_title(f"Plot of Depth Against {x2}")
ax[2].set_xlabel(f"{x3}")
if x3 == 'RT':
ax[2].set_xscale("log")
ax[2].set_xlim(logs[x3].min(),logs[x3].max())
ax[2].set_title(f"Plot of Depth Against {x3}")
if x4 == 'RT':
ax[3].set_xscale("log")
ax[3].set_xlim(logs[x4].min(), logs[x4].max())
ax[3].set_xlabel(f"{x4}")
ax[3].set_title(f"Plot of Depth Against {x4}")
except NameError as err:
print(f'Depth column could not be located. {err}')
def three_plots(logs, x1, x2, x3, top, base, depth=False):
'''
Function to automatically plot well logs
Returns
-------
plot of three logs(x1, x2, x3)
Arguments
---------
logs: Dataframe object of well logs
depth: Set to false or leave as default to use dataframe index
Set to column title if column depth should be used
'''
logs = process(logs)
#Setting the value of the y axis. Using index or property specified
if depth == False:
logs['DEPTH'] = logs.index
logs = logs.reset_index(drop=True)
else:
depth = np.array(logs[depth])
logs = logs.reset_index(drop=True)
logs['DEPTH'] = depth
logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
try:
logs = logs.sort_values(by='DEPTH')
#top = logs.DEPTH.min()
#bot = logs.DEPTH.max()
f, ax = plt.subplots(nrows=1, ncols=3, figsize=(10,10))
for i in range(len(ax)):
ax[i].set_ylim(top, base)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=4)
ax[0].plot(logs[x1], logs.DEPTH, color='black')
ax[1].plot(logs[x2], logs.DEPTH, color='c')
ax[2].plot(logs[x3], logs.DEPTH, color='blue')
ax[0].set_xlabel(f"{x1} ")
if x1 == 'RT':
ax[0].set_xscale("log")
ax[0].set_xlim(logs[x1].min(), logs[x1].max())
ax[0].set_ylabel("Depth(ft)")
ax[0].set_title(f"Plot of Depth Against {x1}")
ax[1].set_xlabel(f"{x2} ")
if x2 == 'RT':
ax[1].set_xscale("log")
ax[1].set_xlim(logs[x2].min(),logs[x2].max())
ax[1].set_title(f"Plot of Depth Against {x2}")
ax[2].set_xlabel(f"{x3}")
if x3 == 'RT':
ax[2].set_xscale("log")
ax[2].set_xlim(logs[x3].min(),logs[x3].max())
ax[2].set_title(f"Plot of Depth Against {x3}")
except NameError as err:
print(f'Depth column could not be located. {err}')
def two_plots(logs, x1, x2, top, base, depth=False):
'''
Function to automatically plot well logs
Returns a plot of two logs(x1, x2)
args::
logs: Dataframe object of well logs
depth: Set to false or leave as default to use dataframe index
Set to column title if column depth should be used
'''
logs = process(logs)
#Setting the value of the y axis. Using index or property specified
if depth == False:
logs['DEPTH'] = logs.index
logs = logs.reset_index(drop=True)
else:
depth = np.array(logs[depth])
logs = logs.reset_index(drop=True)
logs['DEPTH'] = depth
#logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
try:
logs = logs.sort_values(by='DEPTH')
f, ax = plt.subplots(nrows=1, ncols=2, figsize=(8,10))
for i in range(len(ax)):
ax[i].set_ylim(top, base)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=4)
ax[0].plot(logs[x1], logs.DEPTH, color='black')
ax[1].plot(logs[x2], logs.DEPTH, color='c')
ax[0].set_xlabel(f"{x1} ")
if x1 == 'RT':
ax[0].set_xscale("log")
ax[0].set_xlim(logs[x1].min(), logs[x1].max())
ax[0].set_ylabel("Depth(ft)")
ax[0].set_title(f"Plot of Depth Against {x1}")
ax[1].set_xlabel(f"{x2} ")
if x2 == 'RT':
ax[1].set_xscale("log")
ax[1].set_xlim(logs[x2].min(),logs[x2].max())
ax[1].set_title(f"Plot of Depth Against {x2}")
except NameError as err:
print(f'Depth column could not be located. {err}')
def two_plot(logs, x1, x2, top, base, depth=False, scale=False):
'''
Function to automatically plot well logs
Returns a plot of two logs(x1, x2)
args::
logs: Dataframe object of well logs
depth: Set to false or leave as default to use dataframe index
Set to column title if column depth should be used
'''
# Disabled: conversion of resistivity log values to a log scale; the plots
# below use set_xscale("log") instead.
# if x1 == 'RT':
#     logs[x1] = np.log(logs[x1])
#     # logs[x1] = logs[x1].replace({np.Inf:0, np.nan:0}, inplace=False)
# if x2 == 'RT':
#     logs[x2] = np.log(logs[x2])
#     # logs[x2] = logs[x2].replace({np.Inf:0, np.nan:0}, inplace=False)
logs = process(logs)
#Setting the value of the y axis. Using index or property specified
if depth == False:
logs['DEPTH'] = logs.index
logs = logs.reset_index(drop=True)
else:
depth = np.array(logs[depth])
logs = logs.reset_index(drop=True)
logs['DEPTH'] = depth
logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
if scale == True:
try:
logs = logs.sort_values(by='DEPTH')
f, ax = plt.subplots(nrows=1, ncols=2, figsize=(8,10))
for i in range(len(ax)):
ax[i].set_ylim(top, base)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=4)
# Use a shared x-range spanning both logs: the smaller of the two minima and
# the larger of the two maxima.
x_min = min(logs[x1].min(), logs[x2].min())
x_max = max(logs[x1].max(), logs[x2].max())
ax[0].plot(logs[x1], logs.DEPTH, color='black')
ax[1].plot(logs[x2], logs.DEPTH, color='c')
ax[0].set_xlabel(f"{x1} ")
if x1 == 'RT':
ax[0].set_xscale("log")
ax[0].set_xlim(x_min, x_max)
ax[0].set_ylabel("Depth(ft)")
ax[0].set_title(f"Plot of Depth Against {x1}")
ax[1].set_xlabel(f"{x2} ")
if x2 == 'RT':
ax[1].set_xscale("log")
ax[1].set_xlim(x_min, x_max)
ax[1].set_title(f"Plot of Depth Against {x2}")
except NameError as err:
print(f'Depth column could not be located. {err}')
elif scale == False:
try:
logs = logs.sort_values(by='DEPTH')
f, ax = plt.subplots(nrows=1, ncols=2, figsize=(8,10))
for i in range(len(ax)):
ax[i].set_ylim(top, base)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=4)
ax[0].plot(logs[x1], logs.DEPTH, color='black')
ax[1].plot(logs[x2], logs.DEPTH, color='c')
ax[0].set_xlabel(f"{x1} ")
if x1 == 'RT':
ax[0].set_xscale("log")
ax[0].set_xlim(logs[x1].min(), logs[x1].max())
ax[0].set_ylabel("Depth(ft)")
ax[0].set_title(f"Plot of Depth Against {x1}")
ax[1].set_xlabel(f"{x2} ")
if x2 == 'RT':
ax[1].set_xscale("log")
ax[1].set_xlim(logs[x2].min(),logs[x2].max())
ax[1].set_title(f"Plot of Depth Against {x2}")
except NameError as err:
print(f'Depth column could not be located. {err}')
else:
print("The 'scale' argument takes in True or False")
def one_plot(logs, x1, top, base, depth=False):
'''
Function to automatically plot a single well log
args::
logs: Dataframe object of well logs
depth: Set to false or leave as default to use dataframe index
Set to column title if column depth should be used
'''
logs = process(logs)
#Setting the value of the y axis. Using index or property specified
if depth == False:
logs['DEPTH'] = logs.index
logs = logs.reset_index(drop=True)
else:
depth = np.array(logs[depth])
logs = logs.reset_index(drop=True)
logs['DEPTH'] = depth
logs = logs.loc[(logs.DEPTH >= float(top)) & (logs.DEPTH <= float(base))]
try:
logs = logs.sort_values(by='DEPTH')
f, ax = plt.subplots(nrows=1, ncols=1, figsize=(6,15))
ax.plot(logs[x1], logs.DEPTH, color='black')
ax.set_ylim(top, base)
ax.invert_yaxis()
ax.grid()
ax.locator_params(axis='x', nbins=4)
ax.set_xlabel(f"{x1}")
if x1 == 'RT':
ax.set_xscale("log")
ax.set_xlim(logs[x1].min(), logs[x1].max())
ax.set_ylabel("Depth(ft)")
ax.set_title(f"Plot of Depth Against {x1}")
except NameError as err:
print(f'Depth column could not be located. {err}')
# logs[x1] = np.log10(logs[x1])  # has no effect this late; left disabled
'''
The functions below are adapted and modified from the SEG 2015 tutorials on SEG's
github page "The Leading Edge column";
https://github.com/seg/tutorials-2016/blob/master/1610_Facies_classification/
'''
def make_facies_log_plot(logs, x1, x2, x3, x4, x5, Depth=False):
'''
Plots well logs against depth and corresponding predicted lithofacies
in a labelled color plot. The prediction column should be titled 'Facies'
and contain integer labels for better representation
Arguments
---------
logs: dataframe with predicted values column as "Facies"
log1: str -> well log 1
...
log5: str -> well log 5
Depth: depth column title, or False to use the dataframe index
'''
logs = logs.fillna(0)
if Depth == False:
logs['Depth'] = logs.index
Depth = 'Depth'
ztop=logs.Depth.min(); zbot=logs.Depth.max()
else:
logs['Depth'] = logs[Depth]
Depth = 'Depth'
ztop=logs.Depth.min(); zbot=logs.Depth.max()
logs = logs.sort_values(by='Depth', ascending=True)
facies_colors = [
'#F4D03F', '#F5B041','#DC7633','#6E2C00','#1B4F72','#2E86C1',
'#AED6F1', '#A569BD', '#196F3D', '#10003D', '#A56222', '#000000'
]
facies_labels = [
'Sandstone', 'SS/SH', 'Shale', 'Marl', 'Dolomite',
'Limestone', 'Chalk', 'Halite', 'Anhydrite', 'Tuff', 'Coal', 'Basement'
]
facies_colormap = {}
for ind, label in enumerate(facies_labels):
facies_colormap[label] = facies_colors[ind]
no = 12
#no = len(list(dict(logs[target].value_counts())))
cmap_facies = colors.ListedColormap(
facies_colors[0 : no], 'indexed'
)
cluster=np.repeat(np.expand_dims(logs['Facies'].values,1), 100, 1)
f, ax = plt.subplots(nrows=1, ncols=6, figsize=(12, 12))
ax[0].plot(logs[x1], logs.Depth, '-g')
ax[1].plot(logs[x2], logs.Depth, '-')
ax[2].plot(logs[x3], logs.Depth, '-', color='0.5')
ax[3].plot(logs[x4], logs.Depth, '-', color='r')
ax[4].plot(logs[x5], logs.Depth, '-', color='black')
im=ax[5].imshow(cluster, interpolation='none', aspect='auto',
cmap=cmap_facies,vmin=0,vmax=12)
divider = make_axes_locatable(ax[5])
cax = divider.append_axes("right", size="20%", pad=0.05)
cbar=plt.colorbar(im, cax=cax)
cbar.set_label((7*' ').join([
'Sandstone', 'SS/SH', 'Shale', 'Marl', 'Dolomite',
'Limestone', 'Chalk', 'Halite', 'Anhydrite', 'Tuff', 'Coal', 'Basement'
]))
cbar.set_ticks(range(0,1)); cbar.set_ticklabels('')
for i in range(len(ax)-1):
ax[i].set_ylim(ztop,zbot)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=3)
ax[0].set_xlabel(x1)
ax[0].set_xlim(logs[x1].min(), logs[x1].max())
ax[1].set_xlabel(x2)
ax[1].set_xlim(logs[x2].min(), logs[x2].max())
ax[2].set_xlabel(x3)
ax[2].set_xlim(logs[x3].min(), logs[x3].max())
ax[3].set_xlabel(x4)
ax[3].set_xlim(logs[x4].min(), logs[x4].max())
ax[4].set_xlabel(x5)
ax[4].set_xlim(logs[x5].min(), logs[x5].max())
ax[5].set_xlabel('Facies')
ax[1].set_yticklabels([]); ax[2].set_yticklabels([]); ax[3].set_yticklabels([])
ax[4].set_yticklabels([]); ax[5].set_yticklabels([])
ax[5].set_xticklabels([])
f.suptitle('Well: %s'%logs.iloc[0]['WELL'], fontsize=14,y=0.94)
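# Usage sketch (hypothetical dataframe and column names; per the code above,
# the dataframe must carry an integer 'Facies' column and a 'WELL' column):
#
#     make_facies_log_plot(df, 'GR', 'NPHI', 'RHOB', 'RT', 'PEF', Depth='DEPTH_MD')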
def compare_plots(logs, x1, x2, x3, x4, x5, Depth=False):
'''
Plots well logs against depth and corresponding predicted and actual
lithofacies in a labelled color plot for comparison
Arguments
---------
df: dataframe of well data with actual lithofacies as "Actual"
and predicted lithofacies as "Facies"
log1: str -> well log 1
...
log5: str -> well log 5
Depth: depth column title, or False to use the dataframe index
'''
logs = logs.fillna(0)
if Depth == False:
logs['Depth'] = logs.index
Depth = 'Depth'
ztop=logs.Depth.min(); zbot=logs.Depth.max()
else:
logs['Depth'] = logs[Depth]
Depth = 'Depth'
ztop=logs.Depth.min(); zbot=logs.Depth.max()
logs = logs.sort_values(by='Depth', ascending=True)
facies_colors = [
'#F4D03F', '#F5B041','#DC7633','#6E2C00','#1B4F72','#2E86C1',
'#AED6F1', '#A569BD', '#196F3D', '#10003D', '#A56222', '#000000'
]
facies_labels = [
'Sandstone', 'SS/SH', 'Shale', 'Marl', 'Dolomite',
'Limestone', 'Chalk', 'Halite', 'Anhydrite', 'Tuff', 'Coal', 'Basement'
]
facies_colormap = {}
for ind, label in enumerate(facies_labels):
facies_colormap[label] = facies_colors[ind]
no = 12
#no = len(list(dict(logs[target].value_counts())))
cmap_facies = colors.ListedColormap(
facies_colors[0 : no], 'indexed'
)
cluster1=np.repeat(np.expand_dims(logs['Facies'].values,1), 100, 1)
cluster2=np.repeat(np.expand_dims(logs['Actual'].values,1), 100, 1)
f, ax = plt.subplots(nrows=1, ncols=7, figsize=(12, 12))
ax[0].plot(logs[x1], logs.Depth, '-g')
ax[1].plot(logs[x2], logs.Depth, '-')
ax[2].plot(logs[x3], logs.Depth, '-', color='0.5')
ax[3].plot(logs[x4], logs.Depth, '-', color='r')
ax[4].plot(logs[x5], logs.Depth, '-', color='black')
im=ax[5].imshow(cluster1, interpolation='none', aspect='auto',
cmap=cmap_facies,vmin=0,vmax=12)
im=ax[6].imshow(cluster2, interpolation='none', aspect='auto',
cmap=cmap_facies,vmin=0,vmax=12)
divider = make_axes_locatable(ax[6])
cax = divider.append_axes("right", size="20%", pad=0.05)
cbar=plt.colorbar(im, cax=cax)
cbar.set_label((7*' ').join([
'Sandstone', 'SS/SH', 'Shale', 'Marl', 'Dolomite',
'Limestone', 'Chalk', 'Halite', 'Anhydrite', 'Tuff', 'Coal', 'Basement'
]))
cbar.set_ticks(range(0,1)); cbar.set_ticklabels('')
for i in range(len(ax)-2):
ax[i].set_ylim(ztop,zbot)
ax[i].invert_yaxis()
ax[i].grid()
ax[i].locator_params(axis='x', nbins=3)
ax[0].set_xlabel(x1)
ax[0].set_xlim(logs[x1].min(), logs[x1].max())
ax[1].set_xlabel(x2)
ax[1].set_xlim(logs[x2].min(), logs[x2].max())
ax[2].set_xlabel(x3)
ax[2].set_xlim(logs[x3].min(), logs[x3].max())
ax[3].set_xlabel(x4)
ax[3].set_xlim(logs[x4].min(), logs[x4].max())
ax[4].set_xlabel(x5)
ax[4].set_xlim(logs[x5].min(), logs[x5].max())
ax[5].set_xlabel('Predictions')
ax[6].set_xlabel('Actual')
ax[1].set_yticklabels([]); ax[2].set_yticklabels([]); ax[3].set_yticklabels([])
ax[4].set_yticklabels([]); ax[5].set_yticklabels([]); ax[6].set_yticklabels([])
ax[5].set_xticklabels([]); ax[6].set_xticklabels([]);
f.suptitle('Well: %s'%logs.iloc[0]['WELL'], fontsize=14,y=0.94)
| 32.684451
| 91
| 0.542139
| 3,035
| 21,441
| 3.752554
| 0.093904
| 0.06559
| 0.017385
| 0.020546
| 0.878128
| 0.871806
| 0.861445
| 0.854684
| 0.830275
| 0.811836
| 0
| 0.038891
| 0.290052
| 21,441
| 656
| 92
| 32.684451
| 0.709302
| 0.169861
| 0
| 0.778338
| 0
| 0
| 0.117321
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020151
| false
| 0
| 0.015113
| 0
| 0.035264
| 0.020151
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3518aea3193561a4de3b8d33e698b3202d70ee9c
| 102
|
py
|
Python
|
flanautils/data_structures/__init__.py
|
AlberLC/flanautils
|
e7fe5ca5b941cb680ade9714c311de56bf81c2de
|
[
"MIT"
] | null | null | null |
flanautils/data_structures/__init__.py
|
AlberLC/flanautils
|
e7fe5ca5b941cb680ade9714c311de56bf81c2de
|
[
"MIT"
] | null | null | null |
flanautils/data_structures/__init__.py
|
AlberLC/flanautils
|
e7fe5ca5b941cb680ade9714c311de56bf81c2de
|
[
"MIT"
] | null | null | null |
from flanautils.data_structures.bi_dict import *
from flanautils.data_structures.ordered_set import *
| 34
| 52
| 0.862745
| 14
| 102
| 6
| 0.642857
| 0.333333
| 0.428571
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 102
| 2
| 53
| 51
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
351d80aa692796947039fd768cbecde1b8036d89
| 176
|
py
|
Python
|
src/prefect/engine/results/__init__.py
|
Digiterre/prefect
|
3bc254d759b4a86d215b320ce14085898085e1f8
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-10T14:32:32.000Z
|
2020-05-10T14:32:32.000Z
|
src/prefect/engine/results/__init__.py
|
Digiterre/prefect
|
3bc254d759b4a86d215b320ce14085898085e1f8
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/prefect/engine/results/__init__.py
|
Digiterre/prefect
|
3bc254d759b4a86d215b320ce14085898085e1f8
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from prefect.engine.results.constant_result import ConstantResult
from prefect.engine.results.gcs_result import GCSResult
from prefect.engine.results.s3_result import S3Result
| 44
| 65
| 0.880682
| 24
| 176
| 6.333333
| 0.5
| 0.217105
| 0.335526
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.068182
| 176
| 3
| 66
| 58.666667
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3525f5630f27b89ff2d7fad94935864a146f4482
| 688
|
py
|
Python
|
chapter05/alien_colors3.py
|
rafalmaziejuk/PythonCrashCourseSolutions
|
733a393c8ee0eeb4212ee7c1cd629c62eacb7cc4
|
[
"MIT"
] | null | null | null |
chapter05/alien_colors3.py
|
rafalmaziejuk/PythonCrashCourseSolutions
|
733a393c8ee0eeb4212ee7c1cd629c62eacb7cc4
|
[
"MIT"
] | null | null | null |
chapter05/alien_colors3.py
|
rafalmaziejuk/PythonCrashCourseSolutions
|
733a393c8ee0eeb4212ee7c1cd629c62eacb7cc4
|
[
"MIT"
] | null | null | null |
alien_color = 'green'
print("alien_color = " + alien_color)
if alien_color == 'green':
print("You earned 5 points!")
elif alien_color == 'yellow':
print("You earned 10 points!")
else:
print("You earned 15 points!")
alien_color = 'yellow'
print("\nalien_color = " + alien_color)
if alien_color == 'green':
print("You earned 5 points!")
elif alien_color == 'yellow':
print("You earned 10 points!")
else:
print("You earned 15 points!")
alien_color = 'red'
print("\nalien_color = " + alien_color)
if alien_color == 'green':
print("You earned 5 points!")
elif alien_color == 'yellow':
print("You earned 10 points!")
else:
print("You earned 15 points!")
| 23.724138
| 39
| 0.659884
| 96
| 688
| 4.572917
| 0.166667
| 0.296128
| 0.287016
| 0.182232
| 0.922551
| 0.922551
| 0.922551
| 0.922551
| 0.922551
| 0.922551
| 0
| 0.026738
| 0.184593
| 688
| 29
| 40
| 23.724138
| 0.755793
| 0
| 0
| 0.833333
| 0
| 0
| 0.404935
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
1049bb9ff190e2bca33ecb3a2c86f1cbd5a071aa
| 286
|
py
|
Python
|
tests/test_utils.py
|
sbg/seventweets
|
a2733ff175e8be6e34f346a9a14f7f30ab24ea74
|
[
"Apache-2.0"
] | 2
|
2017-06-12T11:00:38.000Z
|
2018-06-21T07:54:38.000Z
|
tests/test_utils.py
|
sbg/seventweets
|
a2733ff175e8be6e34f346a9a14f7f30ab24ea74
|
[
"Apache-2.0"
] | null | null | null |
tests/test_utils.py
|
sbg/seventweets
|
a2733ff175e8be6e34f346a9a14f7f30ab24ea74
|
[
"Apache-2.0"
] | null | null | null |
from seventweets.utils import generate_api_token
def test_generate_api_token():
# since this should be random, just test if two consequent calls
# did not return same result
assert generate_api_token() != generate_api_token()
assert type(generate_api_token()) is str
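# For reference, a token generator satisfying these tests is commonly built
# on the standard-library secrets module (a sketch of one plausible
# implementation, not seventweets' actual code):
#
#     import secrets
#
#     def generate_api_token() -> str:
#         return secrets.token_hex(32)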
| 28.6 | 68 | 0.762238 | 42 | 286 | 4.928571 | 0.666667 | 0.2657 | 0.386473 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.178322 | 286 | 9 | 69 | 31.777778 | 0.880851 | 0.311189 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.25 | true | 0 | 0.25 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
10707e1215eed0d2523877e14079cbdc61b6039f | 1,913 | py | Python | upload/decorators.py | LCBRU/lbrc_upload | be42fef97b67c1f25329db52ae3a88eb293a1203 | ["MIT"] | null | null | null | upload/decorators.py | LCBRU/lbrc_upload | be42fef97b67c1f25329db52ae3a88eb293a1203 | ["MIT"] | null | null | null | upload/decorators.py | LCBRU/lbrc_upload | be42fef97b67c1f25329db52ae3a88eb293a1203 | ["MIT"] | null | null | null |
from functools import wraps
from flask import request, abort
from flask_login import current_user
from upload.model import Study, Upload, UploadFile
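# Each factory below returns a view decorator that loads the requested
# resource and aborts with HTTP 403 unless current_user is among its owners
# (or collaborators, for must_be_study_collaborator).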
def must_be_study_owner():
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
study = Study.query.get_or_404(request.view_args.get("study_id"))
if current_user not in study.owners:
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
def must_be_upload_study_owner(var_name):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
all_args = {**request.view_args, **request.args, **request.form}
upload = Upload.query.get_or_404(all_args.get(var_name))
if current_user not in upload.study.owners:
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
def must_be_upload_file_study_owner(var_name):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
all_args = {**request.view_args, **request.args, **request.form}
upload_file = UploadFile.query.get_or_404(all_args.get(var_name))
if current_user not in upload_file.upload.study.owners:
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
def must_be_study_collaborator():
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
study = Study.query.get_or_404(request.view_args.get("study_id"))
if current_user not in study.collaborators:
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
| 27.328571 | 78 | 0.605855 | 230 | 1,913 | 4.808696 | 0.186957 | 0.122966 | 0.03255 | 0.065099 | 0.818264 | 0.818264 | 0.818264 | 0.818264 | 0.818264 | 0.818264 | 0 | 0.017964 | 0.30162 | 1,913 | 69 | 79 | 27.724638 | 0.80988 | 0 | 0 | 0.695652 | 0 | 0 | 0.008677 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.26087 | false | 0 | 0.086957 | 0 | 0.608696 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
1083d5d7b91e92e02450e2b5f02c0af1bdb31adf | 184 | py | Python | notecoin/huobi/dataset/download_symbol.py | notechats/notecoin | 57e1ed71567ce8864158f24c00ed47addbd9851f | ["Apache-2.0"] | null | null | null | notecoin/huobi/dataset/download_symbol.py | notechats/notecoin | 57e1ed71567ce8864158f24c00ed47addbd9851f | ["Apache-2.0"] | null | null | null | notecoin/huobi/dataset/download_symbol.py | notechats/notecoin | 57e1ed71567ce8864158f24c00ed47addbd9851f | ["Apache-2.0"] | 1 | 2022-03-26T11:42:18.000Z | 2022-03-26T11:42:18.000Z |
from notecoin.huobi.history.core import (load_daily_all, load_symbol_all,
load_symbol_all_to_db)
#load_symbol_all()
load_symbol_all_to_db()
| 23 | 73 | 0.652174 | 25 | 184 | 4.24 | 0.48 | 0.377358 | 0.490566 | 0.45283 | 0.566038 | 0.566038 | 0.566038 | 0.566038 | 0 | 0 | 0 | 0 | 0.282609 | 184 | 7 | 74 | 26.285714 | 0.80303 | 0.092391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
1086a76333d9a6b6c181b316b2567e89bce3244d | 173 | py | Python | elsie/render/backends/__init__.py | spirali/elsie | c221ed68f85d1d5c80b1a58bfc7a473eb1b712d5 | ["MIT"] | 30 | 2018-05-21T13:26:47.000Z | 2021-11-08T11:25:13.000Z | elsie/render/backends/__init__.py | spirali/elsie | c221ed68f85d1d5c80b1a58bfc7a473eb1b712d5 | ["MIT"] | 30 | 2019-01-04T21:21:41.000Z | 2022-02-05T18:05:30.000Z | elsie/render/backends/__init__.py | spirali/elsie | c221ed68f85d1d5c80b1a58bfc7a473eb1b712d5 | ["MIT"] | 4 | 2018-08-17T04:59:40.000Z | 2021-01-02T17:03:30.000Z |
from .backend import Backend # noqa
from .svg.backend import InkscapeBackend # noqa
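# The Cairo backend is optional: if its dependencies are missing, the import
# below is skipped silently and only the Inkscape/SVG backend is exported.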
try:
from .cairo.backend import CairoBackend # noqa
except ImportError:
pass
| 21.625 | 51 | 0.745665 | 21 | 173 | 6.142857 | 0.571429 | 0.302326 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.196532 | 173 | 7 | 52 | 24.714286 | 0.928058 | 0.080925 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.166667 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
52a46a7fb2e7867caded94f5b6fbecf088dee847 | 200 | py | Python | packages/gtmcore/gtmcore/gitlib/__init__.py | gigabackup/gigantum-client | 70fe6b39b87b1c56351f2b4c551b6f1693813e4f | ["MIT"] | 60 | 2018-09-26T15:46:00.000Z | 2021-10-10T02:37:14.000Z | packages/gtmcore/gtmcore/gitlib/__init__.py | gigabackup/gigantum-client | 70fe6b39b87b1c56351f2b4c551b6f1693813e4f | ["MIT"] | 1,706 | 2018-09-26T16:11:22.000Z | 2021-08-20T13:37:59.000Z | packages/gtmcore/gtmcore/gitlib/__init__.py | griffinmilsap/gigantum-client | 70fe6b39b87b1c56351f2b4c551b6f1693813e4f | ["MIT"] | 11 | 2019-03-14T13:23:51.000Z | 2022-01-25T01:29:16.000Z |
from gtmcore.gitlib.git import get_git_interface, GitAuthor, GitRepoInterface, RepoLocation
from gtmcore.gitlib.git_fs import GitFilesystem
from gtmcore.gitlib.git_fs_shim import GitFilesystemShimmed
| 50 | 91 | 0.88 | 26 | 200 | 6.576923 | 0.538462 | 0.192982 | 0.298246 | 0.350877 | 0.25731 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 200 | 3 | 92 | 66.666667 | 0.924324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
eac23d57724ba8bf24d0375f46866d48c92514a7 | 261 | py | Python | DMOJ/TSOC/TSOC_15_C2P2_Diamonds.py | Togohogo1/pg | ee3c36acde47769c66ee13a227762ee677591375 | ["MIT"] | null | null | null | DMOJ/TSOC/TSOC_15_C2P2_Diamonds.py | Togohogo1/pg | ee3c36acde47769c66ee13a227762ee677591375 | ["MIT"] | 1 | 2021-10-14T18:26:56.000Z | 2021-10-14T18:26:56.000Z | DMOJ/TSOC/TSOC_15_C2P2_Diamonds.py | Togohogo1/pg | ee3c36acde47769c66ee13a227762ee677591375 | ["MIT"] | 1 | 2021-08-06T03:39:55.000Z | 2021-08-06T03:39:55.000Z |
N = int(input())
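# Solid top row of N stars, then a diamond-shaped gap that widens to the
# middle row ("*" + spaces + "*") and narrows again, then a solid bottom row.
# The N//2 arithmetic looks intended for odd N.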
print("*"*N)
for i in range(N//2-1):
print("*"*((N//2-i)) + " "*(N-2*(N//2-i)) + "*"*((N//2-i)))
print("*" + " "*(N-2) + "*")
for i in range(N//2-2, -1, -1):
print("*"*((N//2-i)) + " "*(N-2*(N//2-i)) + "*"*((N//2-i)))
print("*"*N)
| 18.642857 | 63 | 0.344828 | 50 | 261 | 1.8 | 0.2 | 0.244444 | 0.2 | 0.177778 | 0.8 | 0.8 | 0.511111 | 0.511111 | 0.511111 | 0.511111 | 0 | 0.069767 | 0.176245 | 261 | 13 | 64 | 20.076923 | 0.348837 | 0 | 0 | 0.5 | 0 | 0 | 0.042146 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.625 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
d806d45e392d39815d83d285ce68adbc56a8ca8a | 1,132 | py | Python | futureWork/bertConstitution/pybert/io/utils.py | multimedia-application-course/HeyConstitution | c8a12e38bc48bf8b7d816508b24e428c251f3bad | ["MIT"] | 1 | 2021-01-28T08:16:15.000Z | 2021-01-28T08:16:15.000Z | futureWork/bertConstitution/pybert/io/utils.py | multimedia-application-course/HeyConstitution | c8a12e38bc48bf8b7d816508b24e428c251f3bad | ["MIT"] | null | null | null | futureWork/bertConstitution/pybert/io/utils.py | multimedia-application-course/HeyConstitution | c8a12e38bc48bf8b7d816508b24e428c251f3bad | ["MIT"] | 1 | 2021-05-10T09:50:12.000Z | 2021-05-10T09:50:12.000Z |
import torch
def collate_fn(batch):
"""
batch should be a list of (sequence, target, length) tuples...
Returns a padded tensor of sequences sorted from longest to shortest,
"""
all_input_ids, all_input_mask, all_segment_ids, all_label_ids,all_input_lens = map(torch.stack, zip(*batch))
max_len = max(all_input_lens).item()
all_input_ids = all_input_ids[:, :max_len]
all_input_mask = all_input_mask[:, :max_len]
all_segment_ids = all_segment_ids[:, :max_len]
return all_input_ids, all_input_mask, all_segment_ids, all_label_ids
def xlnet_collate_fn(batch):
"""
batch should be a list of (sequence, target, length) tuples...
Returns a padded tensor of sequences sorted from longest to shortest,
"""
all_input_ids, all_input_mask, all_segment_ids, all_label_ids,all_input_lens = map(torch.stack, zip(*batch))
max_len = max(all_input_lens).item()
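    # Unlike collate_fn above, keep the *last* max_len positions: XLNet
    # conventionally pads on the left, so truncation happens on the left too.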
all_input_ids = all_input_ids[:, -max_len:]
all_input_mask = all_input_mask[:, -max_len:]
all_segment_ids = all_segment_ids[:, -max_len:]
return all_input_ids, all_input_mask, all_segment_ids, all_label_ids
| 43.538462 | 112 | 0.732332 | 181 | 1,132 | 4.165746 | 0.21547 | 0.212202 | 0.116711 | 0.111406 | 0.970822 | 0.970822 | 0.970822 | 0.970822 | 0.970822 | 0.970822 | 0 | 0 | 0.166078 | 1,132 | 25 | 113 | 45.28 | 0.798729 | 0.234099 | 0 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.066667 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d80ae9813f8cd6f6f3cf9ade79484f19b967053f | 25,955 | py | Python | leo/modes/md.py | ATikhonov2/leo-editor | 225aac990a9b2804aaa9dea29574d6e072e30474 | ["MIT"] | 1,550 | 2015-01-14T16:30:37.000Z | 2022-03-31T08:55:58.000Z | leo/modes/md.py | ATikhonov2/leo-editor | 225aac990a9b2804aaa9dea29574d6e072e30474 | ["MIT"] | 2,009 | 2015-01-13T16:28:52.000Z | 2022-03-31T18:21:48.000Z | leo/modes/md.py | ATikhonov2/leo-editor | 225aac990a9b2804aaa9dea29574d6e072e30474 | ["MIT"] | 200 | 2015-01-05T15:07:41.000Z | 2022-03-07T17:05:01.000Z |
# Leo colorizer control file for md mode.
# This file is in the public domain.
# Properties for md mode.
# Important: most of this file is actually an html colorizer.
properties = {
"commentEnd": "-->",
"commentStart": "<!--",
"indentSize": "4",
"maxLineLen": "120",
"tabSize": "4",
}
# Attributes dict for md_main ruleset.
md_main_attributes_dict = {
"default": "null",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for md_inline_markup ruleset.
md_inline_markup_attributes_dict = {
"default": "MARKUP",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for md_block_html_tags ruleset.
md_block_html_tags_attributes_dict = {
"default": "MARKUP",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "true",
"no_word_sep": "",
}
# Attributes dict for md_markdown ruleset.
md_markdown_attributes_dict = {
"default": "MARKUP",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Attributes dict for md_link_label_definition ruleset.
md_link_label_definition_attributes_dict = {
"default": "KEYWORD3",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Attributes dict for md_link_inline_url_title ruleset.
md_link_inline_url_title_attributes_dict = {
"default": "KEYWORD3",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Attributes dict for md_link_inline_url_title_close ruleset.
md_link_inline_url_title_close_attributes_dict = {
"default": "KEYWORD3",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Attributes dict for md_link_inline_label_close ruleset.
md_link_inline_label_close_attributes_dict = {
"default": "LABEL",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Attributes dict for md_markdown_blockquote ruleset.
md_markdown_blockquote_attributes_dict = {
"default": "LABEL",
"digit_re": "",
"escape": "",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Dictionary of attributes dictionaries for md mode.
attributesDictDict = {
"md_block_html_tags": md_block_html_tags_attributes_dict,
"md_inline_markup": md_inline_markup_attributes_dict,
"md_link_inline_label_close": md_link_inline_label_close_attributes_dict,
"md_link_inline_url_title": md_link_inline_url_title_attributes_dict,
"md_link_inline_url_title_close": md_link_inline_url_title_close_attributes_dict,
"md_link_label_definition": md_link_label_definition_attributes_dict,
"md_main": md_main_attributes_dict,
"md_markdown": md_markdown_attributes_dict,
"md_markdown_blockquote": md_markdown_blockquote_attributes_dict,
}
# Keywords dict for md_main ruleset.
md_main_keywords_dict = {}
# Keywords dict for md_inline_markup ruleset.
md_inline_markup_keywords_dict = {}
# Keywords dict for md_block_html_tags ruleset.
md_block_html_tags_keywords_dict = {}
# Keywords dict for md_markdown ruleset.
md_markdown_keywords_dict = {}
# Keywords dict for md_link_label_definition ruleset.
md_link_label_definition_keywords_dict = {}
# Keywords dict for md_link_inline_url_title ruleset.
md_link_inline_url_title_keywords_dict = {}
# Keywords dict for md_link_inline_url_title_close ruleset.
md_link_inline_url_title_close_keywords_dict = {}
# Keywords dict for md_link_inline_label_close ruleset.
md_link_inline_label_close_keywords_dict = {}
# Keywords dict for md_markdown_blockquote ruleset.
md_markdown_blockquote_keywords_dict = {}
# Dictionary of keywords dictionaries for md mode.
keywordsDictDict = {
"md_block_html_tags": md_block_html_tags_keywords_dict,
"md_inline_markup": md_inline_markup_keywords_dict,
"md_link_inline_label_close": md_link_inline_label_close_keywords_dict,
"md_link_inline_url_title": md_link_inline_url_title_keywords_dict,
"md_link_inline_url_title_close": md_link_inline_url_title_close_keywords_dict,
"md_link_label_definition": md_link_label_definition_keywords_dict,
"md_main": md_main_keywords_dict,
"md_markdown": md_markdown_keywords_dict,
"md_markdown_blockquote": md_markdown_blockquote_keywords_dict,
}
# Rules for md_main ruleset.
def md_heading(colorer,s,i):
# issue 386.
# print('md_heading',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="^[#]+",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_link(colorer,s,i):
# issue 386.
# print('md_link',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="\[[^]]+\]\([^)]+\)",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_star_emphasis1(colorer,s,i):
# issue 386.
# print('md_underscore_emphasis1',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="\\*[^\\s*][^*]*\\*",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_star_emphasis2(colorer,s,i):
# issue 386.
# print('md_star_emphasis2',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="\\*\\*[^*]+\\*\\*",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_underscore_emphasis1(colorer,s,i):
# issue 386.
# print('md_underscore_emphasis1',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="_[^_]+_",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_underline_equals(colorer,s,i):
# issue 386.
# print('md_underline_equals',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="^===[=]+$",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_underline_minus(colorer,s,i):
# issue 386.
# print('md_underline_minus',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="---[-]+$",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_underscore_emphasis2(colorer,s,i):
# issue 386.
# print('md_underscore_emphasis2',i)
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="__[^_]+__",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule0(colorer, s, i):
return colorer.match_span(s, i, kind="comment1", begin="<!--", end="-->",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule1(colorer, s, i):
return colorer.match_span(s, i, kind="markup", begin="<script", end="</script>",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="html::javascript",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule2(colorer, s, i):
return colorer.match_seq_regexp(s, i, kind="markup", regexp="<hr\\b([^<>])*?/?>",
at_line_start=True, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule3(colorer, s, i):
return colorer.match_span_regexp(s, i, kind="markup", begin="<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|noscript|form|fieldset|iframe|math|ins|del)\\b", end="</$1>",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="md::block_html_tags",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule4(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq=" < ",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule5(colorer, s, i):
return colorer.match_span(s, i, kind="markup", begin="<", end=">",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::inline_markup",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
# Rules dict for md_main ruleset.
rulesDict1 = {
"#": [md_heading,], # Issue #386.
"[": [md_link,], # issue 386.
"*": [md_star_emphasis2, md_star_emphasis1,], # issue 386. Order important
"=": [md_underline_equals,], # issue 386.
"-": [md_underline_minus,], # issue 386.
"_": [md_underscore_emphasis2, md_underscore_emphasis1,], # issue 386. Order important.
" ": [md_rule4,],
"<": [md_rule0,md_rule1,md_rule2,md_rule3,md_rule5,],
}
# Rules for md_inline_markup ruleset.
# Rules dict for md_inline_markup ruleset.
rulesDict2 = {}
# Rules for md_block_html_tags ruleset.
if 0: # Rules 6 & 7 will never match?
def md_rule6(colorer, s, i):
return colorer.match_eol_span_regexp(s, i, kind="invalid", regexp="[\\S]+",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule7(colorer, s, i):
return colorer.match_eol_span_regexp(s, i, kind="invalid", regexp="{1,3}[\\S]+",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule8(colorer, s, i):
# leadin: [ \t]
return colorer.match_eol_span_regexp(s, i, kind="", regexp="( {4}|\\t)",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="html::main", exclude_match=False)
def md_rule9(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule10(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="'", end="'",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule11(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="=",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
# Rules dict for md_block_html_tags ruleset.
rulesDict3 = {
" ": [md_rule8], # new
"\t":[md_rule8], # new
"\"": [md_rule9,],
"'": [md_rule10,],
# "(": [md_rule8,],
"=": [md_rule11,],
# "[": [md_rule6,], # Will never fire: the leadin character is any non-space!
# "{": [md_rule7,], # Will never fire: the leading character is any non-space!
}
# Rules for md_markdown ruleset.
def md_rule12(colorer, s, i):
# Leadins: [ \t>]
return colorer.match_eol_span_regexp(s, i, kind="", regexp="[ \\t]*(>[ \\t]{1})+",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="md::markdown_blockquote", exclude_match=False)
def md_rule13(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq="*",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule14(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq="_",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule15(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq="\\][",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule16(colorer, s, i):
return colorer.match_seq_regexp(s, i, kind="null", regexp="\\\\[\\Q*_\\`[](){}#+.!-\\E]",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule17(colorer, s, i):
return colorer.match_span(s, i, kind="literal2", begin="``` ruby", end="```",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="ruby::main",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule18(colorer, s, i):
return colorer.match_span(s, i, kind="literal2", begin="```", end="```",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule19(colorer, s, i):
# leadin: `
return colorer.match_span_regexp(s, i, kind="literal2", begin="(`{1,2})", end="$1",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule20(colorer, s, i):
# Leadins are [ \t]
return colorer.match_eol_span_regexp(s, i, kind="literal2", regexp="( {4,}|\\t+)\\S",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule21(colorer, s, i):
# Leadins are [=-]
return colorer.match_eol_span_regexp(s, i, kind="keyword1", regexp="[=-]+",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule22(colorer, s, i):
# Leadin is #
return colorer.match_eol_span_regexp(s, i, kind="keyword1", regexp="#{1,6}[ \\t]*(.+?)",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule23(colorer, s, i):
# Leadins are [ \t -_*]
return colorer.match_eol_span_regexp(s, i, kind="keyword1", regexp="[ ]{0,2}([ ]?[-_*][ ]?){3,}[ \\t]*",
at_line_start=True, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule24(colorer, s, i):
# Leadins are [ \t*+-]
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="[ \\t]{0,}[*+-][ \\t]+",
at_line_start=True, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule25(colorer, s, i):
# Leadins are [ \t0123456789]
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="[ \\t]{0,}\\d+\\.[ \\t]+",
at_line_start=True, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule26(colorer, s, i):
return colorer.match_eol_span_regexp(s, i, kind="label", regexp="\\[(.*?)\\]\\:",
at_line_start=False, at_whitespace_end=True, at_word_start=False,
delegate="md::link_label_definition", exclude_match=False)
def md_rule27(colorer, s, i):
# leadin: [
return colorer.match_span_regexp(s, i, kind="keyword4", begin="!?\\[[\\p{Alnum}\\p{Blank}]*", end="\\]",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::link_inline_url_title",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
def md_rule28(colorer, s, i):
# Leadins: [*_]
return colorer.match_span_regexp(s, i, kind="literal3", begin="(\\*\\*|__)", end="$1",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
def md_rule29(colorer, s, i):
# Leadins: [*_]
return colorer.match_span_regexp(s, i, kind="literal4", begin="(\\*|_)", end="$1",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
# Rules dict for md_markdown ruleset.
rulesDict4 = {
# Existing leadins...
"!": [md_rule27,],
"#": [md_rule22,],
"*": [md_rule13,md_rule23,md_rule24,md_rule28,md_rule29], # new: 23,24,28,29.
"\\": [md_rule15,md_rule16,md_rule26,],
"_": [md_rule14,md_rule23,md_rule24,md_rule28,md_rule29], # new: 23,24,28,29.
"`": [md_rule17,md_rule18,md_rule19,], # new: 19
"[": [md_rule27,], # new: 27 old: 12,21,23,24,25.
# Unused leadins...
# "(": [md_rule28,md_rule29,],
# New leadins...
" ": [md_rule12,md_rule20,md_rule23,md_rule24,md_rule25,],
"\t":[md_rule12,md_rule20,md_rule23,md_rule24,md_rule25],
">":[md_rule12,],
"=":[md_rule21,],
"-":[md_rule21,md_rule23,md_rule24],
"0":[md_rule25,],
"1":[md_rule25,],
"2":[md_rule25,],
"3":[md_rule25,],
"4":[md_rule25,],
"5":[md_rule25,],
"6":[md_rule25,],
"7":[md_rule25,],
"8":[md_rule25,],
"9":[md_rule25,],
}
# Rules for md_link_label_definition ruleset.
def md_rule30(colorer, s, i):
return colorer.match_seq_regexp(s, i, kind="null", regexp="\\\\[\\Q*_\\`[](){}#+.!-\\E]",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule31(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="\"",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule32(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="(",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule33(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq=")",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
# Rules dict for md_link_label_definition ruleset.
rulesDict5 = {
"\"": [md_rule31,],
"(": [md_rule32,],
")": [md_rule33,],
"\\": [md_rule30,],
}
# Rules for md_link_inline_url_title ruleset.
def md_rule34(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="]",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule35(colorer, s, i):
return colorer.match_span_regexp(s, i, kind="keyword4", begin="\\[", end="\\]",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::link_inline_label_close",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
def md_rule36(colorer, s, i):
return colorer.match_span_regexp(s, i, kind="keyword4", begin="\\(", end="\\)",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::link_inline_url_title_close",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
# Rules dict for md_link_inline_url_title ruleset.
rulesDict6 = {
"(": [md_rule36,],
"[": [md_rule35,],
"]": [md_rule34,],
}
# Rules for md_link_inline_url_title_close ruleset.
def md_rule37(colorer, s, i):
return colorer.match_eol_span(s, i, kind="null", seq=")",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::main", exclude_match=False)
# Rules dict for md_link_inline_url_title_close ruleset.
rulesDict7 = {
")": [md_rule37,],
}
# Rules for md_link_inline_label_close ruleset.
def md_rule38(colorer, s, i):
return colorer.match_eol_span(s, i, kind="null", seq="]",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::main", exclude_match=False)
# Rules dict for md_link_inline_label_close ruleset.
rulesDict8 = {
"]": [md_rule38,],
}
# Rules for md_markdown_blockquote ruleset.
def md_rule39(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq=" < ",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule40(colorer, s, i):
return colorer.match_span(s, i, kind="markup", begin="<", end=">",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::inline_markup",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule41(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq="*",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule42(colorer, s, i):
return colorer.match_seq(s, i, kind="null", seq="_",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule43(colorer, s, i):
# leadin: backslash.
return colorer.match_seq(s, i, kind="null", seq="\\][",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule44(colorer, s, i):
# leadin: backslash.
return colorer.match_seq_regexp(s, i, kind="null", regexp="\\\\[\\Q*_\\`[](){}#+.!-\\E]",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule45(colorer, s, i):
# leadin: `
return colorer.match_span_regexp(s, i, kind="literal2", begin="(`{1,2})", end="$1",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule46(colorer, s, i):
# leadins: [ \t]
return colorer.match_eol_span_regexp(s, i, kind="literal2", regexp="( {4,}|\\t+)\\S",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule47(colorer, s, i):
# leadins: [=-]
return colorer.match_eol_span_regexp(s, i, kind="keyword1", regexp="[=-]+",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule48(colorer, s, i):
# leadin: #
return colorer.match_eol_span_regexp(s, i, kind="keyword1", regexp="#{1,6}[ \\t]*(.+?)",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule49(colorer, s, i):
# leadins: [ -_*]
return colorer.match_eol_span_regexp(s, i, kind="keyword1", regexp="[ ]{0,2}([ ]?[-_*][ ]?){3,}[ \\t]*",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def md_rule50(colorer, s, i):
# leadins: [ \t*+-]
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="[ \\t]{0,}[*+-][ \\t]+",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule51(colorer, s, i):
# leadins: [ \t0123456789]
return colorer.match_seq_regexp(s, i, kind="keyword2", regexp="[ \\t]{0,}\\d+\\.[ \\t]+",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def md_rule52(colorer, s, i):
# leadin: [
return colorer.match_eol_span_regexp(s, i, kind="label", regexp="\\[(.*?)\\]\\:",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::link_label_definition", exclude_match=False)
def md_rule53(colorer, s, i):
# leadin: [
return colorer.match_span_regexp(s, i, kind="keyword4", begin="!?\\[[\\p{Alnum}\\p{Blank}]*", end="\\]",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="md::link_inline_url_title",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
def md_rule54(colorer, s, i):
# leadins: [*_]
return colorer.match_span_regexp(s, i, kind="literal3", begin="(\\*\\*|__)", end="$1",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def md_rule55(colorer, s, i):
# leadins: [*_]
return colorer.match_span_regexp(s, i, kind="literal4", begin="(\\*|_)", end="$1",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
# Rules dict for md_markdown_blockquote ruleset.
rulesDict9 = {
# old, unused.
# "!": [], # 53
# "[": [], # 47,49,50,51,
" ": [md_rule39,md_rule46,md_rule49,md_rule50], # new: 46,49,50
"\t":[md_rule46,md_rule50,], # new: 46,50
"#": [md_rule48,],
"(": [md_rule54,md_rule55,], # 45,46
"*": [md_rule41,md_rule49,md_rule50,md_rule54,md_rule55,], # new: 49,50,54,55
"<": [md_rule40,],
"\\": [md_rule43,md_rule44,], # 52,53
"_": [md_rule42,md_rule49,md_rule54,md_rule55,], # new: 49,54,55
# new leadins:
"+":[md_rule50,],
"-":[md_rule47,md_rule49,md_rule50,],
"=":[md_rule47,],
"[":[md_rule52,md_rule53],
"`":[md_rule45,],
"0":[md_rule50,],
"1":[md_rule50,],
"2":[md_rule50,],
"3":[md_rule50,],
"4":[md_rule50,],
"5":[md_rule50,],
"6":[md_rule50,],
"7":[md_rule50,],
"8":[md_rule50,],
"9":[md_rule50,],
}
# x.rulesDictDict for md mode.
rulesDictDict = {
"md_block_html_tags": rulesDict3,
"md_inline_markup": rulesDict2,
"md_link_inline_label_close": rulesDict8,
"md_link_inline_url_title": rulesDict6,
"md_link_inline_url_title_close": rulesDict7,
"md_link_label_definition": rulesDict5,
"md_main": rulesDict1,
"md_markdown": rulesDict4,
"md_markdown_blockquote": rulesDict9,
}
# Import dict for md mode.
importDict = {
"md_inline_markup": ["html::tags",],
"md_link_label_definition": ["md_link_label_definition::markdown",],
"md_main": ["md_main::markdown",],
}
| 39.147813 | 172 | 0.658987 | 3,598 | 25,955 | 4.39383 | 0.068649 | 0.016193 | 0.036435 | 0.064773 | 0.847808 | 0.817256 | 0.797141 | 0.764438 | 0.74135 | 0.71687 | 0 | 0.026744 | 0.180274 | 25,955 | 662 | 173 | 39.206949 | 0.7163 | 0.125409 | 0 | 0.43326 | 0 | 0.002188 | 0.135484 | 0.038697 | 0 | 0 | 0 | 0 | 0 | 1 | 0.140044 | false | 0 | 0.002188 | 0.140044 | 0.282276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
d81c7e96f89927b945b4f69ba744d4aba1de1e46 | 478 | py | Python | apps/API/permissions.py | ExpoAshique/ProveBanking__s | f0b45fffea74d00d14014be27aa50fe5f42f6903 | ["MIT"] | null | null | null | apps/API/permissions.py | ExpoAshique/ProveBanking__s | f0b45fffea74d00d14014be27aa50fe5f42f6903 | ["MIT"] | null | null | null | apps/API/permissions.py | ExpoAshique/ProveBanking__s | f0b45fffea74d00d14014be27aa50fe5f42f6903 | ["MIT"] | null | null | null |
from rest_framework import permissions
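# Django REST Framework permission classes gating views by the user's role.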
class IsClient(permissions.BasePermission):
def has_permission(self, request, view):
return request.user.is_client
class IsVendor(permissions.BasePermission):
def has_permission(self, request, view):
return request.user.is_vendor
class VendorReadOnly(permissions.BasePermission):
def has_permission(self, request, view):
return request.method in permissions.SAFE_METHODS or request.user.is_client
| 28.117647 | 83 | 0.771967 | 57 | 478 | 6.333333 | 0.45614 | 0.207756 | 0.232687 | 0.257618 | 0.606648 | 0.606648 | 0.606648 | 0.606648 | 0.606648 | 0.606648 | 0 | 0 | 0.154812 | 478 | 16 | 84 | 29.875 | 0.893564 | 0 | 0 | 0.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.3 | false | 0 | 0.1 | 0.3 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
dc9e896d6627bc0058fc879547e5dba4f4810de4 | 4,748 | py | Python | openrl/models/models.py | natetsang/open-rl | 426723d0d6759672ce77e02afeb55cbeb68fcfb0 | ["MIT"] | 2 | 2021-09-10T18:52:35.000Z | 2022-01-03T19:48:06.000Z | openrl/models/models.py | natetsang/open-rl | 426723d0d6759672ce77e02afeb55cbeb68fcfb0 | ["MIT"] | 1 | 2021-12-28T17:42:23.000Z | 2021-12-28T17:42:23.000Z | openrl/models/models.py | natetsang/open-rl | 426723d0d6759672ce77e02afeb55cbeb68fcfb0 | ["MIT"] | null | null | null |
import tensorflow as tf
from tensorflow.keras import layers
def actor_critic_fc_discrete_network(state_dims: int,
num_actions: int,
num_hidden_layers: int,
hidden_size: int) -> tf.keras.Model:
"""
Creates a fully-connected Actor-Critic model for DISCRETE action spaces.
Input:
- state vector
Output:
- probability of taking each discrete action (Actor)
- value of being in the current state (Critic)
:param state_dims: The dimensionality of the observed state
:param num_actions: The number of discrete actions
:param num_hidden_layers: The number of hidden layers
:param hidden_size: The number of neurons in each hidden layer (all layers are same)
:return: tf.keras.Model
"""
inputs = layers.Input(shape=(state_dims,), name="input_layer")
# Create shared hidden layers
hidden = inputs
for i in range(num_hidden_layers):
hidden = layers.Dense(hidden_size, activation="relu", name=f"hidden_layer{i}")(hidden)
# Create output layers
action = layers.Dense(num_actions, activation="softmax", name="action_output_layer")(hidden)
critic = layers.Dense(1, name="critic_output_layer")(hidden)
model = tf.keras.Model(inputs=inputs, outputs=[action, critic])
return model
def actor_fc_discrete_network(state_dims: int,
num_actions: int,
num_hidden_layers: int,
hidden_size: int) -> tf.keras.Model:
"""
Creates a fully connected Actor model for DISCRETE action spaces.
Input:
- state vector
Output:
- probability of taking each discrete action
:param state_dims: The number of state dimensions
:param num_actions: The number of discrete actions
:param num_hidden_layers: The number of hidden layers
:param hidden_size: The number of neurons in each hidden layer (all layers are same)
:return: tf.keras.Model
"""
inputs = layers.Input(shape=(state_dims,), name="input_layer")
# Create shared hidden layers
hidden = inputs
for i in range(num_hidden_layers):
hidden = layers.Dense(hidden_size, activation="relu", name=f"hidden_layer{i}")(hidden)
# Create output layers
action = layers.Dense(num_actions, activation="softmax", name="action_output_layer")(hidden)
model = tf.keras.Model(inputs=inputs, outputs=action)
return model
def critic_fc_network(state_dims: int,
num_hidden_layers: int,
hidden_size: int) -> tf.keras.Model:
"""
Creates a fully connected Critic model.
Input:
- state vector
Output:
- value of being in the current state
:param state_dims: The number of state dimensions
:param num_hidden_layers: The number of hidden layers
:param hidden_size: The number of neurons in each hidden layer (all layers are same)
:return: tf.keras.Model
"""
inputs = layers.Input(shape=(state_dims,), name="input_layer")
# Create shared hidden layers
hidden = inputs
for i in range(num_hidden_layers):
hidden = layers.Dense(hidden_size, activation="relu", name=f"hidden_layer{i}")(hidden)
# Create output layers
critic = layers.Dense(1, name="critic_output_layer")(hidden)
model = tf.keras.Model(inputs=inputs, outputs=critic)
return model
def actor_fc_continuous_network(state_dims: int,
action_dims: int,
num_hidden_layers: int,
hidden_size: int) -> tf.keras.Model:
"""
Creates a fully-connected Actor model for CONTINUOUS action spaces.
Input:
- state vector
Output:
- mean action to take (for each action dim)
- standard deviation of action to take (for each action dim)
:param state_dims: The number of state dimensions
:param action_dims: The number of action dimensions
:param num_hidden_layers: The number of hidden layers
:param hidden_size: The number of neurons in each hidden layer (all layers are same)
:return: tf.keras.Model
"""
inputs = layers.Input(shape=(state_dims,), name="input_layer")
# Create shared hidden layers
hidden = inputs
for i in range(num_hidden_layers):
hidden = layers.Dense(hidden_size, activation="relu", name=f"hidden_layer{i}")(hidden)
mu = layers.Dense(action_dims, activation="tanh", name="mu")(hidden)
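    # Presumably scales the tanh output from [-1, 1] to an action range of
    # [-2, 2] (e.g. Pendulum's torque bound); the factor 2.0 is hard-coded.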
mu = layers.Lambda(lambda x: x * 2.0)(mu)
std = layers.Dense(action_dims, activation="softplus", name='std')(hidden)
model = tf.keras.Model(inputs=inputs, outputs=[mu, std])
return model
| 36.523077 | 96 | 0.659436 | 620 | 4,748 | 4.91129 | 0.129032 | 0.094581 | 0.050575 | 0.047291 | 0.882102 | 0.8578 | 0.829557 | 0.792118 | 0.778325 | 0.763547 | 0 | 0.001125 | 0.251264 | 4,748 | 129 | 97 | 36.806202 | 0.855415 | 0.39048 | 0 | 0.708333 | 0 | 0 | 0.084733 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.041667 | 0 | 0.208333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f4edffc25f833fce9c43509e639cb64fcbaead7a | 124,557 | py | Python | underworld_graph.py | Coin-Miner-5000/support | 3dd89e64ef85f890ec2649bc083ae2a3f6b72078 | ["MIT"] | null | null | null | underworld_graph.py | Coin-Miner-5000/support | 3dd89e64ef85f890ec2649bc083ae2a3f6b72078 | ["MIT"] | null | null | null | underworld_graph.py | Coin-Miner-5000/support | 3dd89e64ef85f890ec2649bc083ae2a3f6b72078 | ["MIT"] | null | null | null |
underworld_graph = {
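    # Rooms keyed by id; optional "n"/"s"/"e"/"w" entries hold the id of the
    # adjacent room in that direction.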
992: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(75,61)",
"elevation": 0,
"w": 966
},
966: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(74,61)",
"elevation": 0,
"e": 992,
"w": 960
},
960: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(73,61)",
"elevation": 0,
"e": 966,
"w": 956
},
956: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(72,61)",
"elevation": 0,
"e": 960,
"w": 902
},
902: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(71,61)",
"elevation": 0,
"e": 956,
"w": 874
},
874: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,61)",
"elevation": 0,
"e": 902,
"w": 762
},
762: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,61)",
"elevation": 0,
"e": 874,
"w": 728
},
728: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,61)",
"elevation": 0,
"n": 741,
"e": 762,
"w": 724
},
741: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,62)",
"elevation": 0,
"s": 728,
"e": 793
},
793: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,62)",
"elevation": 0,
"n": 808,
"e": 901,
"w": 741
},
808: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,63)",
"elevation": 0,
"n": 821,
"s": 793,
"e": 920
},
821: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,64)",
"elevation": 0,
"n": 974,
"s": 808,
"e": 953
},
974: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,65)",
"elevation": 0,
"s": 821
},
953: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,64)",
"elevation": 0,
"w": 821
},
920: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,63)",
"elevation": 0,
"e": 946,
"w": 808
},
946: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(71,63)",
"elevation": 0,
"w": 920
},
901: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,62)",
"elevation": 0,
"w": 793
},
724: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,61)",
"elevation": 0,
"n": 737,
"s": 748,
"e": 728,
"w": 711
},
737: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,62)",
"elevation": 0,
"n": 756,
"s": 724
},
756: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,63)",
"elevation": 0,
"s": 737,
"e": 868
},
868: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,63)",
"elevation": 0,
"n": 885,
"w": 756
},
885: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,64)",
"elevation": 0,
"s": 868
},
748: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,60)",
"elevation": 0,
"n": 724,
"s": 772,
"e": 764
},
772: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,59)",
"elevation": 0,
"n": 748,
"s": 780
},
780: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,58)",
"elevation": 0,
"n": 772,
"s": 818
},
818: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,57)",
"elevation": 0,
"n": 780,
"s": 877,
"e": 829
},
877: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,56)",
"elevation": 0,
"n": 818,
"s": 997,
"e": 937
},
997: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,55)",
"elevation": 0,
"n": 877
},
937: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,56)",
"elevation": 0,
"w": 877
},
829: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,57)",
"elevation": 0,
"e": 912,
"w": 818
},
912: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,57)",
"elevation": 0,
"w": 829
},
764: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,60)",
"elevation": 0,
"s": 769,
"e": 848,
"w": 748
},
769: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,59)",
"elevation": 0,
"n": 764,
"s": 799,
"e": 847
},
799: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,58)",
"elevation": 0,
"n": 769,
"e": 908
},
908: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,58)",
"elevation": 0,
"w": 799
},
847: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,59)",
"elevation": 0,
"w": 769
},
848: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,60)",
"elevation": 0,
"e": 853,
"w": 764
},
853: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,60)",
"elevation": 0,
"s": 958,
"e": 939,
"w": 848
},
958: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,59)",
"elevation": 0,
"n": 853,
"s": 972
},
972: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(70,58)",
"elevation": 0,
"n": 958
},
939: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(71,60)",
"elevation": 0,
"w": 853
},
711: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,61)",
"elevation": 0,
"n": 721,
"e": 724,
"w": 633
},
721: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,62)",
"elevation": 0,
"s": 711
},
633: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,61)",
"elevation": 0,
"e": 711,
"w": 623
},
623: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,61)",
"elevation": 0,
"n": 609,
"e": 633
},
609: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,62)",
"elevation": 0,
"n": 603,
"s": 623,
"e": 652
},
603: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,63)",
"elevation": 0,
"n": 618,
"s": 609,
"w": 520
},
618: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,64)",
"elevation": 0,
"s": 603,
"e": 631
},
631: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,64)",
"elevation": 0,
"s": 646,
"w": 618
},
646: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,63)",
"elevation": 0,
"n": 631,
"e": 662
},
662: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,63)",
"elevation": 0,
"n": 675,
"w": 646
},
675: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,64)",
"elevation": 0,
"s": 662,
"e": 768
},
768: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,64)",
"elevation": 0,
"w": 675
},
520: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,63)",
"elevation": 0,
"n": 579,
"e": 603,
"w": 519
},
579: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,64)",
"elevation": 0,
"n": 601,
"s": 520
},
601: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,65)",
"elevation": 0,
"n": 617,
"s": 579,
"e": 629
},
617: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,66)",
"elevation": 0,
"n": 645,
"s": 601
},
645: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,67)",
"elevation": 0,
"s": 617
},
629: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,65)",
"elevation": 0,
"n": 684,
"e": 667,
"w": 601
},
684: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,66)",
"elevation": 0,
"n": 718,
"s": 629,
"e": 687
},
718: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,67)",
"elevation": 0,
"n": 734,
"s": 684,
"e": 782
},
734: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,68)",
"elevation": 0,
"s": 718
},
782: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,67)",
"elevation": 0,
"w": 718
},
687: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,66)",
"elevation": 0,
"e": 806,
"w": 684
},
806: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,66)",
"elevation": 0,
"n": 909,
"w": 687
},
909: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,67)",
"elevation": 0,
"n": 910,
"s": 806,
"e": 917
},
910: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,68)",
"elevation": 0,
"s": 909
},
917: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,67)",
"elevation": 0,
"e": 929,
"w": 909
},
929: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,67)",
"elevation": 0,
"w": 917
},
667: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,65)",
"elevation": 0,
"e": 717,
"w": 629
},
717: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,65)",
"elevation": 0,
"e": 820,
"w": 667
},
820: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,65)",
"elevation": 0,
"n": 866,
"e": 876,
"w": 717
},
866: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,66)",
"elevation": 0,
"s": 820
},
876: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,65)",
"elevation": 0,
"w": 820
},
519: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,63)",
"elevation": 0,
"n": 563,
"s": 583,
"e": 520,
"w": 518
},
563: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,64)",
"elevation": 0,
"s": 519
},
583: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,62)",
"elevation": 0,
"n": 519,
"e": 595
},
595: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,62)",
"elevation": 0,
"w": 583
},
518: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,63)",
"elevation": 0,
"e": 519,
"w": 507
},
507: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,63)",
"elevation": 0,
"n": 514,
"s": 506,
"e": 518,
"w": 511
},
514: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,64)",
"elevation": 0,
"n": 521,
"s": 507,
"e": 515
},
521: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,65)",
"elevation": 0,
"n": 522,
"s": 514
},
522: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,66)",
"elevation": 0,
"n": 536,
"s": 521
},
536: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,67)",
"elevation": 0,
"n": 658,
"s": 522
},
658: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,68)",
"elevation": 0,
"n": 678,
"s": 536,
"e": 672
},
678: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,69)",
"elevation": 0,
"s": 658,
"e": 703
},
703: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,69)",
"elevation": 0,
"n": 709,
"e": 733,
"w": 678
},
709: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,70)",
"elevation": 0,
"n": 736,
"s": 703,
"e": 712
},
736: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,71)",
"elevation": 0,
"s": 709,
"e": 786
},
786: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,71)",
"elevation": 0,
"n": 798,
"e": 961,
"w": 736
},
798: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,72)",
"elevation": 0,
"n": 889,
"s": 786
},
889: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,73)",
"elevation": 0,
"n": 919,
"s": 798,
"e": 923,
"w": 915
},
919: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,74)",
"elevation": 0,
"s": 889
},
923: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,73)",
"elevation": 0,
"w": 889
},
915: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,73)",
"elevation": 0,
"e": 889
},
961: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,71)",
"elevation": 0,
"w": 786
},
712: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,70)",
"elevation": 0,
"e": 739,
"w": 709
},
739: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,70)",
"elevation": 0,
"w": 712
},
733: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,69)",
"elevation": 0,
"e": 740,
"w": 703
},
740: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,69)",
"elevation": 0,
"s": 770,
"e": 751,
"w": 733
},
770: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,68)",
"elevation": 0,
"n": 740
},
751: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,69)",
"elevation": 0,
"n": 810,
"e": 794,
"w": 740
},
810: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,70)",
"elevation": 0,
"s": 751
},
794: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,69)",
"elevation": 0,
"n": 802,
"s": 896,
"e": 841,
"w": 751
},
802: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,70)",
"elevation": 0,
"n": 830,
"s": 794,
"e": 865
},
830: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,71)",
"elevation": 0,
"s": 802
},
865: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,70)",
"elevation": 0,
"n": 924,
"e": 897,
"w": 802
},
924: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,71)",
"elevation": 0,
"s": 865,
"e": 979
},
979: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,71)",
"elevation": 0,
"w": 924
},
897: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,70)",
"elevation": 0,
"e": 986,
"w": 865
},
986: {
"title": "Snitch Board",
"description": "A generic board stands before you with a golden snitch carved into the top.",
"terrain": "NORMAL",
"coordinates": "(68,70)",
"elevation": 0,
"w": 897
},
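# NOTE: Room 986 above ("Snitch Board") is the only non-"Darkness" room in this
# stretch of the map. A minimal breadth-first-search sketch for finding a path
# to it through the "n"/"s"/"e"/"w" exit ids is included below, commented out
# so this file remains a plain data literal. It assumes the enclosing dict is
# bound to a name such as `rooms` (hypothetical; substitute whatever name this
# literal is actually assigned to).
#
#     from collections import deque
#
#     def find_path(rooms, start, goal):
#         """Return the list of room ids from start to goal, or None."""
#         frontier = deque([[start]])   # queue of partial paths
#         seen = {start}                # room ids already enqueued
#         while frontier:
#             path = frontier.popleft()
#             if path[-1] == goal:
#                 return path
#             for direction in ("n", "s", "e", "w"):
#                 nxt = rooms[path[-1]].get(direction)
#                 if nxt is not None and nxt not in seen:
#                     seen.add(nxt)
#                     frontier.append(path + [nxt])
#         return None
#
#     # Example (ids taken from the entries in this file):
#     # find_path(rooms, 897, 986) -> [897, 986]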
896: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,68)",
"elevation": 0,
"n": 794
},
841: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,69)",
"elevation": 0,
"e": 962,
"w": 794
},
962: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,69)",
"elevation": 0,
"s": 963,
"w": 841
},
963: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,68)",
"elevation": 0,
"n": 962,
"e": 982
},
982: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,68)",
"elevation": 0,
"n": 995,
"w": 963
},
995: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,69)",
"elevation": 0,
"s": 982,
"e": 996
},
996: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(69,69)",
"elevation": 0,
"w": 995
},
672: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,68)",
"elevation": 0,
"w": 658
},
515: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,64)",
"elevation": 0,
"n": 576,
"w": 514
},
576: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,65)",
"elevation": 0,
"n": 582,
"s": 515,
"e": 578
},
582: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,66)",
"elevation": 0,
"n": 642,
"s": 576,
"e": 644
},
642: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,67)",
"elevation": 0,
"s": 582
},
644: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,66)",
"elevation": 0,
"n": 664,
"w": 582
},
664: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,67)",
"elevation": 0,
"n": 680,
"s": 644
},
680: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,68)",
"elevation": 0,
"s": 664
},
578: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,65)",
"elevation": 0,
"w": 576
},
506: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,62)",
"elevation": 0,
"n": 507,
"s": 504,
"e": 531,
"w": 529
},
504: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,61)",
"elevation": 0,
"n": 506,
"s": 500,
"e": 544,
"w": 523
},
500: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,60)",
"elevation": 0,
"n": 504,
"s": 502,
"e": 503,
"w": 501
},
502: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,59)",
"elevation": 0,
"n": 500,
"s": 508,
"e": 505,
"w": 509
},
508: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,58)",
"elevation": 0,
"n": 502,
"s": 561
},
561: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,57)",
"elevation": 0,
"n": 508,
"s": 571
},
571: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,56)",
"elevation": 0,
"n": 561,
"s": 584
},
584: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,55)",
"elevation": 0,
"n": 571,
"s": 669
},
669: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,54)",
"elevation": 0,
"n": 584,
"s": 695
},
695: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,53)",
"elevation": 0,
"n": 669,
"s": 757,
"e": 696
},
757: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,52)",
"elevation": 0,
"n": 695,
"s": 814
},
814: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,51)",
"elevation": 0,
"n": 757,
"s": 849
},
849: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,50)",
"elevation": 0,
"n": 814,
"e": 955
},
955: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,50)",
"elevation": 0,
"w": 849
},
696: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,53)",
"elevation": 0,
"s": 753,
"w": 695
},
753: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,52)",
"elevation": 0,
"n": 696,
"s": 784,
"e": 775
},
784: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,51)",
"elevation": 0,
"n": 753
},
775: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,52)",
"elevation": 0,
"s": 823,
"e": 790,
"w": 753
},
823: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,51)",
"elevation": 0,
"n": 775,
"e": 824
},
824: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,51)",
"elevation": 0,
"s": 827,
"w": 823
},
827: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,50)",
"elevation": 0,
"n": 824,
"s": 832,
"e": 904,
"w": 985
},
832: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,49)",
"elevation": 0,
"n": 827,
"s": 932,
"e": 844,
"w": 888
},
932: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,48)",
"elevation": 0,
"n": 832,
"e": 950
},
950: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,48)",
"elevation": 0,
"w": 932
},
844: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,49)",
"elevation": 0,
"w": 832
},
888: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,49)",
"elevation": 0,
"e": 832,
"w": 936
},
936: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,49)",
"elevation": 0,
"s": 988,
"e": 888
},
988: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,48)",
"elevation": 0,
"n": 936
},
904: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,50)",
"elevation": 0,
"e": 976,
"w": 827
},
976: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,50)",
"elevation": 0,
"w": 904
},
985: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,50)",
"elevation": 0,
"e": 827
},
790: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,52)",
"elevation": 0,
"e": 835,
"w": 775
},
835: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,52)",
"elevation": 0,
"e": 883,
"w": 790
},
883: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,52)",
"elevation": 0,
"s": 890,
"e": 891,
"w": 835
},
890: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,51)",
"elevation": 0,
"n": 883,
"w": 926
},
926: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,51)",
"elevation": 0,
"e": 890
},
891: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,52)",
"elevation": 0,
"s": 969,
"w": 883
},
969: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,51)",
"elevation": 0,
"n": 891,
"e": 984
},
984: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,51)",
"elevation": 0,
"w": 969
},
505: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,59)",
"elevation": 0,
"e": 525,
"w": 502
},
525: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,59)",
"elevation": 0,
"n": 560,
"s": 542,
"e": 533,
"w": 505
},
560: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,60)",
"elevation": 0,
"s": 525,
"e": 602
},
602: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,60)",
"elevation": 0,
"e": 612,
"w": 560
},
612: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,60)",
"elevation": 0,
"s": 637,
"e": 635,
"w": 602
},
637: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,59)",
"elevation": 0,
"n": 612,
"s": 651,
"e": 650
},
651: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,58)",
"elevation": 0,
"n": 637,
"e": 674
},
674: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,58)",
"elevation": 0,
"e": 778,
"w": 651
},
778: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,58)",
"elevation": 0,
"s": 815,
"w": 674
},
815: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,57)",
"elevation": 0,
"n": 778,
"s": 825
},
825: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,56)",
"elevation": 0,
"n": 815,
"s": 854
},
854: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,55)",
"elevation": 0,
"n": 825
},
650: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,59)",
"elevation": 0,
"e": 758,
"w": 637
},
758: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,59)",
"elevation": 0,
"w": 650
},
635: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,60)",
"elevation": 0,
"e": 720,
"w": 612
},
720: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,60)",
"elevation": 0,
"w": 635
},
542: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,58)",
"elevation": 0,
"n": 525,
"s": 549,
"w": 554
},
549: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,57)",
"elevation": 0,
"n": 542,
"s": 556
},
556: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,56)",
"elevation": 0,
"n": 549,
"s": 600,
"e": 598
},
600: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,55)",
"elevation": 0,
"n": 556,
"s": 648,
"e": 610
},
648: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,54)",
"elevation": 0,
"n": 600,
"s": 735,
"w": 673
},
735: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,53)",
"elevation": 0,
"n": 648
},
673: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,54)",
"elevation": 0,
"e": 648
},
610: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,55)",
"elevation": 0,
"s": 732,
"w": 600
},
732: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,54)",
"elevation": 0,
"n": 610,
"s": 779
},
779: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,53)",
"elevation": 0,
"n": 732
},
598: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,56)",
"elevation": 0,
"e": 659,
"w": 556
},
659: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,56)",
"elevation": 0,
"s": 665,
"e": 754,
"w": 598
},
665: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,55)",
"elevation": 0,
"n": 659,
"s": 723,
"e": 700
},
723: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,54)",
"elevation": 0,
"n": 665,
"s": 816
},
816: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,53)",
"elevation": 0,
"n": 723
},
700: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,55)",
"elevation": 0,
"s": 813,
"w": 665
},
813: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,54)",
"elevation": 0,
"n": 700,
"s": 831,
"e": 858
},
831: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,53)",
"elevation": 0,
"n": 813
},
858: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,54)",
"elevation": 0,
"s": 907,
"e": 879,
"w": 813
},
907: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(66,53)",
"elevation": 0,
"n": 858,
"e": 925
},
925: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,53)",
"elevation": 0,
"s": 965,
"w": 907
},
965: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,52)",
"elevation": 0,
"n": 925,
"e": 980
},
980: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,52)",
"elevation": 0,
"s": 999,
"w": 965
},
999: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(68,51)",
"elevation": 0,
"n": 980
},
879: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(67,54)",
"elevation": 0,
"w": 858
},
754: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,56)",
"elevation": 0,
"w": 659
},
554: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,58)",
"elevation": 0,
"s": 567,
"e": 542
},
567: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,57)",
"elevation": 0,
"n": 554,
"s": 574
},
574: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,56)",
"elevation": 0,
"n": 567,
"s": 588
},
588: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,55)",
"elevation": 0,
"n": 574
},
533: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,59)",
"elevation": 0,
"s": 539,
"w": 525
},
539: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,58)",
"elevation": 0,
"n": 533,
"s": 540
},
540: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,57)",
"elevation": 0,
"n": 539,
"e": 585
},
585: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(64,57)",
"elevation": 0,
"e": 682,
"w": 540
},
682: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,57)",
"elevation": 0,
"w": 585
},
509: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,59)",
"elevation": 0,
"s": 524,
"e": 502,
"w": 510
},
524: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,58)",
"elevation": 0,
"n": 509,
"s": 545,
"w": 526
},
545: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,57)",
"elevation": 0,
"n": 524,
"s": 565
},
565: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,56)",
"elevation": 0,
"n": 545,
"s": 590
},
590: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,55)",
"elevation": 0,
"n": 565,
"s": 625
},
625: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,54)",
"elevation": 0,
"n": 590,
"s": 699
},
699: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,53)",
"elevation": 0,
"n": 625,
"s": 809
},
809: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,52)",
"elevation": 0,
"n": 699
},
526: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,58)",
"elevation": 0,
"s": 538,
"e": 524,
"w": 530
},
538: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,57)",
"elevation": 0,
"n": 526,
"s": 564
},
564: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,56)",
"elevation": 0,
"n": 538,
"s": 586
},
586: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,55)",
"elevation": 0,
"n": 564,
"s": 619,
"w": 599
},
619: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,54)",
"elevation": 0,
"n": 586,
"s": 670
},
670: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,53)",
"elevation": 0,
"n": 619,
"s": 707
},
707: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,52)",
"elevation": 0,
"n": 670,
"s": 719
},
719: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,51)",
"elevation": 0,
"n": 707,
"s": 749,
"e": 800
},
749: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,50)",
"elevation": 0,
"n": 719,
"s": 859,
"e": 822
},
859: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,49)",
"elevation": 0,
"n": 749,
"s": 938
},
938: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,48)",
"elevation": 0,
"n": 859,
"s": 975
},
975: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,47)",
"elevation": 0,
"n": 938,
"s": 983
},
983: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,46)",
"elevation": 0,
"n": 975
},
822: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,50)",
"elevation": 0,
"s": 872,
"w": 749
},
872: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,49)",
"elevation": 0,
"n": 822,
"s": 906,
"e": 968
},
906: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,48)",
"elevation": 0,
"n": 872
},
968: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,49)",
"elevation": 0,
"w": 872
},
800: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,51)",
"elevation": 0,
"w": 719
},
599: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,55)",
"elevation": 0,
"s": 632,
"e": 586
},
632: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,54)",
"elevation": 0,
"n": 599,
"s": 654
},
654: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,53)",
"elevation": 0,
"n": 632,
"s": 677
},
677: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,52)",
"elevation": 0,
"n": 654,
"s": 691
},
691: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,51)",
"elevation": 0,
"n": 677,
"s": 716,
"w": 704
},
716: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,50)",
"elevation": 0,
"n": 691,
"s": 836,
"w": 761
},
836: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,49)",
"elevation": 0,
"n": 716,
"s": 860
},
860: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,48)",
"elevation": 0,
"n": 836,
"s": 941
},
941: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,47)",
"elevation": 0,
"n": 860,
"s": 947
},
947: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,46)",
"elevation": 0,
"n": 941
},
761: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,50)",
"elevation": 0,
"s": 863,
"e": 716,
"w": 837
},
863: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,49)",
"elevation": 0,
"n": 761,
"s": 913,
"w": 873
},
913: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,48)",
"elevation": 0,
"n": 863,
"s": 922
},
922: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,47)",
"elevation": 0,
"n": 913,
"s": 964,
"w": 959
},
964: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,46)",
"elevation": 0,
"n": 922
},
959: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,47)",
"elevation": 0,
"e": 922
},
873: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,49)",
"elevation": 0,
"s": 914,
"e": 863,
"w": 899
},
914: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,48)",
"elevation": 0,
"n": 873
},
899: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,49)",
"elevation": 0,
"e": 873
},
837: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,50)",
"elevation": 0,
"e": 761,
"w": 948
},
948: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,50)",
"elevation": 0,
"e": 837,
"w": 998
},
998: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,50)",
"elevation": 0,
"e": 948
},
704: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,51)",
"elevation": 0,
"e": 691,
"w": 774
},
774: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,51)",
"elevation": 0,
"e": 704,
"w": 842
},
842: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,51)",
"elevation": 0,
"e": 774
},
530: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,58)",
"elevation": 0,
"s": 577,
"e": 526,
"w": 559
},
577: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,57)",
"elevation": 0,
"n": 530,
"s": 589
},
589: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,56)",
"elevation": 0,
"n": 577
},
559: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,58)",
"elevation": 0,
"s": 572,
"e": 530,
"w": 569
},
572: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,57)",
"elevation": 0,
"n": 559,
"s": 621,
"w": 607
},
621: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,56)",
"elevation": 0,
"n": 572,
"s": 634
},
634: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,55)",
"elevation": 0,
"n": 621,
"s": 639,
"w": 636
},
639: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,54)",
"elevation": 0,
"n": 634,
"s": 653,
"w": 702
},
653: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,53)",
"elevation": 0,
"n": 639,
"s": 661,
"w": 690
},
661: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,52)",
"elevation": 0,
"n": 653,
"w": 788
},
788: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,52)",
"elevation": 0,
"e": 661,
"w": 867
},
867: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,52)",
"elevation": 0,
"e": 788,
"w": 881
},
881: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,52)",
"elevation": 0,
"s": 898,
"e": 867,
"w": 884
},
898: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,51)",
"elevation": 0,
"n": 881
},
884: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,52)",
"elevation": 0,
"e": 881
},
690: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,53)",
"elevation": 0,
"e": 653,
"w": 817
},
817: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,53)",
"elevation": 0,
"e": 690
},
702: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,54)",
"elevation": 0,
"e": 639,
"w": 715
},
715: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,54)",
"elevation": 0,
"e": 702,
"w": 791
},
791: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,54)",
"elevation": 0,
"s": 855,
"e": 715,
"w": 852
},
855: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,53)",
"elevation": 0,
"n": 791
},
852: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,54)",
"elevation": 0,
"s": 903,
"e": 791,
"w": 978
},
903: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,53)",
"elevation": 0,
"n": 852,
"w": 951
},
951: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,53)",
"elevation": 0,
"e": 903
},
978: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,54)",
"elevation": 0,
"e": 852
},
636: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,55)",
"elevation": 0,
"e": 634
},
607: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,57)",
"elevation": 0,
"s": 640,
"e": 572,
"w": 630
},
640: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,56)",
"elevation": 0,
"n": 607,
"w": 693
},
693: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,56)",
"elevation": 0,
"s": 694,
"e": 640,
"w": 765
},
694: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,55)",
"elevation": 0,
"n": 693
},
765: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,56)",
"elevation": 0,
"s": 870,
"e": 693
},
870: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,55)",
"elevation": 0,
"n": 765,
"w": 882
},
882: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,55)",
"elevation": 0,
"e": 870
},
630: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,57)",
"elevation": 0,
"e": 607,
"w": 755
},
755: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,57)",
"elevation": 0,
"e": 630,
"w": 766
},
766: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,57)",
"elevation": 0,
"s": 931,
"e": 755,
"w": 857
},
931: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,56)",
"elevation": 0,
"n": 766
},
857: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,57)",
"elevation": 0,
"s": 875,
"e": 766,
"w": 918
},
875: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,56)",
"elevation": 0,
"n": 857,
"s": 989
},
989: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,55)",
"elevation": 0,
"n": 875
},
918: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,57)",
"elevation": 0,
"n": 933,
"e": 857,
"w": 994
},
933: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,58)",
"elevation": 0,
"s": 918
},
994: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,57)",
"elevation": 0,
"e": 918
},
569: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,58)",
"elevation": 0,
"e": 559,
"w": 615
},
615: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,58)",
"elevation": 0,
"e": 569
},
510: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,59)",
"elevation": 0,
"n": 517,
"e": 509,
"w": 513
},
517: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,60)",
"elevation": 0,
"s": 510
},
513: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,59)",
"elevation": 0,
"n": 550,
"e": 510,
"w": 532
},
550: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,60)",
"elevation": 0,
"n": 570,
"s": 513
},
570: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,61)",
"elevation": 0,
"s": 550
},
532: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,59)",
"elevation": 0,
"n": 553,
"e": 513,
"w": 568
},
553: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,60)",
"elevation": 0,
"n": 593,
"s": 532
},
593: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,61)",
"elevation": 0,
"s": 553
},
568: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,59)",
"elevation": 0,
"n": 573,
"e": 532,
"w": 580
},
573: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,60)",
"elevation": 0,
"s": 568
},
580: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,59)",
"elevation": 0,
"e": 568,
"w": 606
},
606: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,59)",
"elevation": 0,
"s": 608,
"e": 580,
"w": 722
},
608: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,58)",
"elevation": 0,
"n": 606,
"w": 752
},
752: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,58)",
"elevation": 0,
"e": 608
},
722: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,59)",
"elevation": 0,
"e": 606,
"w": 763
},
763: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,59)",
"elevation": 0,
"s": 826,
"e": 722,
"w": 846
},
826: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,58)",
"elevation": 0,
"n": 763
},
846: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,59)",
"elevation": 0,
"e": 763
},
503: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,60)",
"elevation": 0,
"w": 500
},
501: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,60)",
"elevation": 0,
"e": 500
},
544: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,61)",
"elevation": 0,
"e": 552,
"w": 504
},
552: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(62,61)",
"elevation": 0,
"e": 604,
"w": 544
},
604: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(63,61)",
"elevation": 0,
"w": 552
},
523: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,61)",
"elevation": 0,
"e": 504
},
531: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,62)",
"elevation": 0,
"w": 506
},
529: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,62)",
"elevation": 0,
"e": 506
},
511: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,63)",
"elevation": 0,
"n": 512,
"e": 507,
"w": 516
},
512: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,64)",
"elevation": 0,
"n": 534,
"s": 511,
"w": 541
},
534: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,65)",
"elevation": 0,
"n": 551,
"s": 512
},
551: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,66)",
"elevation": 0,
"n": 591,
"s": 534,
"w": 558
},
591: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,67)",
"elevation": 0,
"n": 627,
"s": 551
},
627: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,68)",
"elevation": 0,
"n": 643,
"s": 591
},
643: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,69)",
"elevation": 0,
"n": 676,
"s": 627,
"w": 668
},
676: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,70)",
"elevation": 0,
"n": 726,
"s": 643,
"e": 686
},
726: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,71)",
"elevation": 0,
"n": 773,
"s": 676,
"e": 746
},
773: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,72)",
"elevation": 0,
"n": 789,
"s": 726
},
789: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,73)",
"elevation": 0,
"s": 773,
"e": 795
},
795: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,73)",
"elevation": 0,
"n": 804,
"w": 789
},
804: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,74)",
"elevation": 0,
"n": 971,
"s": 795,
"e": 970,
"w": 811
},
971: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,75)",
"elevation": 0,
"s": 804
},
970: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,74)",
"elevation": 0,
"w": 804
},
811: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(59,74)",
"elevation": 0,
"e": 804,
"w": 934
},
934: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,74)",
"elevation": 0,
"n": 945,
"e": 811
},
945: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,75)",
"elevation": 0,
"n": 967,
"s": 934
},
967: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,76)",
"elevation": 0,
"s": 945
},
746: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,71)",
"elevation": 0,
"n": 771,
"w": 726
},
771: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,72)",
"elevation": 0,
"s": 746,
"e": 801
},
801: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(61,72)",
"elevation": 0,
"w": 771
},
686: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(60,70)",
"elevation": 0,
"w": 676
},
668: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,69)",
"elevation": 0,
"n": 706,
"s": 738,
"e": 643,
"w": 688
},
706: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,70)",
"elevation": 0,
"n": 743,
"s": 668
},
743: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,71)",
"elevation": 0,
"n": 760,
"s": 706,
"w": 750
},
760: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,72)",
"elevation": 0,
"s": 743
},
750: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,71)",
"elevation": 0,
"n": 776,
"e": 743,
"w": 840
},
776: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,72)",
"elevation": 0,
"n": 777,
"s": 750,
"w": 805
},
777: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,73)",
"elevation": 0,
"s": 776,
"e": 785,
"w": 894
},
785: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,73)",
"elevation": 0,
"w": 777
},
894: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,73)",
"elevation": 0,
"n": 935,
"e": 777
},
935: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,74)",
"elevation": 0,
"n": 957,
"s": 894
},
957: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,75)",
"elevation": 0,
"s": 935
},
805: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,72)",
"elevation": 0,
"e": 776,
"w": 838
},
838: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,72)",
"elevation": 0,
"n": 851,
"e": 805,
"w": 845
},
851: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,73)",
"elevation": 0,
"n": 940,
"s": 838
},
940: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,74)",
"elevation": 0,
"s": 851
},
845: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,72)",
"elevation": 0,
"n": 895,
"e": 838
},
895: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,73)",
"elevation": 0,
"s": 845,
"w": 916
},
916: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,73)",
"elevation": 0,
"s": 993,
"e": 895,
"w": 987
},
993: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,72)",
"elevation": 0,
"n": 916
},
987: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,73)",
"elevation": 0,
"e": 916
},
840: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,71)",
"elevation": 0,
"e": 750,
"w": 887
},
887: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,71)",
"elevation": 0,
"e": 840,
"w": 949
},
949: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,71)",
"elevation": 0,
"e": 887
},
738: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,68)",
"elevation": 0,
"n": 668
},
688: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,69)",
"elevation": 0,
"n": 745,
"e": 668,
"w": 730
},
745: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,70)",
"elevation": 0,
"s": 688,
"w": 792
},
792: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,70)",
"elevation": 0,
"e": 745
},
730: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,69)",
"elevation": 0,
"e": 688
},
558: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,66)",
"elevation": 0,
"e": 551,
"w": 587
},
587: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,66)",
"elevation": 0,
"n": 594,
"e": 558,
"w": 592
},
594: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,67)",
"elevation": 0,
"n": 649,
"s": 587,
"e": 622,
"w": 641
},
649: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,68)",
"elevation": 0,
"s": 594
},
622: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,67)",
"elevation": 0,
"w": 594
},
641: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,67)",
"elevation": 0,
"n": 663,
"e": 594,
"w": 683
},
663: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,68)",
"elevation": 0,
"s": 641
},
683: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,67)",
"elevation": 0,
"n": 713,
"e": 641
},
713: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,68)",
"elevation": 0,
"n": 747,
"s": 683
},
747: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,69)",
"elevation": 0,
"n": 839,
"s": 713,
"w": 828
},
839: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,70)",
"elevation": 0,
"s": 747,
"w": 911
},
911: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,70)",
"elevation": 0,
"e": 839,
"w": 921
},
921: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,70)",
"elevation": 0,
"e": 911,
"w": 990
},
990: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,70)",
"elevation": 0,
"e": 921,
"w": 991
},
991: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,70)",
"elevation": 0,
"e": 990
},
828: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,69)",
"elevation": 0,
"e": 747
},
592: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,66)",
"elevation": 0,
"e": 587,
"w": 697
},
697: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,66)",
"elevation": 0,
"e": 592
},
541: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,64)",
"elevation": 0,
"n": 543,
"e": 512,
"w": 546
},
543: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,65)",
"elevation": 0,
"s": 541
},
546: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,64)",
"elevation": 0,
"n": 557,
"e": 541,
"w": 548
},
557: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,65)",
"elevation": 0,
"s": 546
},
548: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,64)",
"elevation": 0,
"n": 655,
"e": 546,
"w": 605
},
655: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,65)",
"elevation": 0,
"s": 548
},
605: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,64)",
"elevation": 0,
"n": 679,
"e": 548,
"w": 611
},
679: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,65)",
"elevation": 0,
"s": 605
},
611: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,64)",
"elevation": 0,
"n": 656,
"e": 605,
"w": 624
},
656: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,65)",
"elevation": 0,
"n": 727,
"s": 611
},
727: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,66)",
"elevation": 0,
"n": 759,
"s": 656
},
759: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,67)",
"elevation": 0,
"n": 880,
"s": 727
},
880: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,68)",
"elevation": 0,
"s": 759,
"w": 886
},
886: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,68)",
"elevation": 0,
"e": 880
},
624: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,64)",
"elevation": 0,
"n": 689,
"e": 611
},
689: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,65)",
"elevation": 0,
"s": 624
},
516: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,63)",
"elevation": 0,
"s": 528,
"e": 511,
"w": 527
},
528: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(58,62)",
"elevation": 0,
"n": 516,
"s": 555,
"w": 535
},
555: {
"title": "Wishing Well",
"description": "You are standing besides a large well. A sign next the well reads 'EXAMINE WELL, FIND WEALTH'.",
"terrain": "NORMAL",
"coordinates": "(58,61)",
"elevation": 0,
"n": 528
},
535: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,62)",
"elevation": 0,
"e": 528,
"w": 562
},
562: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,62)",
"elevation": 0,
"e": 535,
"w": 566
},
566: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,62)",
"elevation": 0,
"s": 596,
"e": 562,
"w": 581
},
596: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,61)",
"elevation": 0,
"n": 566,
"w": 597
},
597: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,61)",
"elevation": 0,
"s": 626,
"e": 596,
"w": 657
},
626: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,60)",
"elevation": 0,
"n": 597
},
657: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,61)",
"elevation": 0,
"s": 705,
"e": 597
},
705: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,60)",
"elevation": 0,
"n": 657,
"w": 708
},
708: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,60)",
"elevation": 0,
"e": 705
},
581: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,62)",
"elevation": 0,
"e": 566,
"w": 614
},
614: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,62)",
"elevation": 0,
"e": 581
},
527: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(57,63)",
"elevation": 0,
"e": 516,
"w": 537
},
537: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(56,63)",
"elevation": 0,
"e": 527,
"w": 547
},
547: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(55,63)",
"elevation": 0,
"e": 537,
"w": 575
},
575: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(54,63)",
"elevation": 0,
"e": 547,
"w": 613
},
613: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,63)",
"elevation": 0,
"e": 575,
"w": 616
},
616: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,63)",
"elevation": 0,
"n": 638,
"s": 620,
"e": 613,
"w": 628
},
638: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,64)",
"elevation": 0,
"n": 647,
"s": 616
},
647: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,65)",
"elevation": 0,
"n": 666,
"s": 638,
"w": 701
},
666: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,66)",
"elevation": 0,
"n": 833,
"s": 647,
"e": 729,
"w": 803
},
833: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,67)",
"elevation": 0,
"n": 900,
"s": 666
},
900: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,68)",
"elevation": 0,
"n": 928,
"s": 833
},
928: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,69)",
"elevation": 0,
"s": 900
},
729: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,66)",
"elevation": 0,
"n": 731,
"w": 666
},
731: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(53,67)",
"elevation": 0,
"s": 729
},
803: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,66)",
"elevation": 0,
"n": 834,
"e": 666
},
834: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,67)",
"elevation": 0,
"n": 905,
"s": 803
},
905: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,68)",
"elevation": 0,
"n": 977,
"s": 834
},
977: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,69)",
"elevation": 0,
"s": 905
},
701: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,65)",
"elevation": 0,
"e": 647
},
620: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,62)",
"elevation": 0,
"n": 616,
"s": 660,
"w": 692
},
660: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(52,61)",
"elevation": 0,
"n": 620
},
692: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,62)",
"elevation": 0,
"s": 698,
"e": 620,
"w": 710
},
698: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,61)",
"elevation": 0,
"n": 692,
"s": 714,
"w": 742
},
714: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,60)",
"elevation": 0,
"n": 698,
"w": 783
},
783: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,60)",
"elevation": 0,
"e": 714,
"w": 871
},
871: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,60)",
"elevation": 0,
"s": 942,
"e": 783
},
942: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,59)",
"elevation": 0,
"n": 871
},
742: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,61)",
"elevation": 0,
"e": 698,
"w": 843
},
843: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,61)",
"elevation": 0,
"e": 742
},
710: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,62)",
"elevation": 0,
"e": 692
},
628: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,63)",
"elevation": 0,
"n": 671,
"e": 616,
"w": 681
},
671: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(51,64)",
"elevation": 0,
"s": 628,
"w": 781
},
781: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,64)",
"elevation": 0,
"n": 787,
"e": 671
},
787: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,65)",
"elevation": 0,
"n": 861,
"s": 781
},
861: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,66)",
"elevation": 0,
"n": 930,
"s": 787,
"w": 862
},
930: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,67)",
"elevation": 0,
"s": 861
},
862: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,66)",
"elevation": 0,
"n": 878,
"e": 861
},
878: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,67)",
"elevation": 0,
"s": 862
},
681: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(50,63)",
"elevation": 0,
"e": 628,
"w": 685
},
685: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,63)",
"elevation": 0,
"n": 767,
"s": 744,
"e": 681,
"w": 725
},
767: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,64)",
"elevation": 0,
"n": 796,
"s": 685,
"w": 819
},
796: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,65)",
"elevation": 0,
"s": 767,
"w": 850
},
850: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,65)",
"elevation": 0,
"n": 954,
"e": 796,
"w": 973
},
954: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,66)",
"elevation": 0,
"s": 850
},
973: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,65)",
"elevation": 0,
"n": 981,
"e": 850
},
981: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,66)",
"elevation": 0,
"s": 973
},
819: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,64)",
"elevation": 0,
"e": 767,
"w": 893
},
893: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,64)",
"elevation": 0,
"e": 819,
"w": 944
},
944: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(46,64)",
"elevation": 0,
"e": 893
},
744: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(49,62)",
"elevation": 0,
"n": 685,
"w": 797
},
797: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,62)",
"elevation": 0,
"s": 812,
"e": 744,
"w": 807
},
812: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,61)",
"elevation": 0,
"n": 797,
"s": 892
},
892: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,60)",
"elevation": 0,
"n": 812,
"s": 943
},
943: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,59)",
"elevation": 0,
"n": 892,
"w": 952
},
952: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,59)",
"elevation": 0,
"e": 943
},
807: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,62)",
"elevation": 0,
"n": 856,
"s": 864,
"e": 797,
"w": 869
},
856: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,63)",
"elevation": 0,
"s": 807
},
864: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,61)",
"elevation": 0,
"n": 807,
"s": 927
},
927: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(47,60)",
"elevation": 0,
"n": 864
},
869: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(46,62)",
"elevation": 0,
"e": 807
},
725: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(48,63)",
"elevation": 0,
"e": 685
},
652: {
"title": "Darkness",
"description": "You are standing on grass and surrounded by darkness.",
"terrain": "NORMAL",
"coordinates": "(65,62)",
"elevation": 0,
"w": 609
}
}
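The map above is effectively an adjacency list: each room id maps to its grid coordinates plus up to four exits (n/s/e/w) pointing at neighbouring room ids. A minimal navigation sketch, assuming the map has been loaded as a Python dict named `rooms` keyed by integer room id (the function and variable names here are illustrative, not part of the original data):

from collections import deque

def shortest_path(rooms, start, goal):
    # Breadth-first search over the n/s/e/w exit links.
    # Returns the list of room ids from start to goal, or None.
    frontier = deque([[start]])
    seen = {start}
    while frontier:
        path = frontier.popleft()
        room = rooms.get(path[-1], {})  # some exits may point outside a partial map
        for direction in ("n", "s", "e", "w"):
            nxt = room.get(direction)
            if nxt is None or nxt in seen:
                continue
            if nxt == goal:
                return path + [nxt]
            seen.add(nxt)
            frontier.append(path + [nxt])
    return None

# e.g. shortest_path(rooms, 916, 555) would search for a route through the
# grass maze to the Wishing Well (room 555).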
| 27.673184
| 120
| 0.470146
| 12,013
| 124,557
| 4.874636
| 0.047865
| 0.110999
| 0.204922
| 0.213033
| 0.799262
| 0.799262
| 0.799262
| 0.799262
| 0.799262
| 0.799262
| 0
| 0.087362
| 0.357258
| 124,557
| 4,500
| 121
| 27.679333
| 0.644096
| 0
| 0
| 0.443556
| 0
| 0.000222
| 0.478247
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4fefcc87a21390eeb6ca3511675484fd71b4a69
| 7,578
|
py
|
Python
|
test/unit/outcome_table_decider.py
|
angelonakos/aws-build-accumulator
|
8768bb60dfda13d1f8b3ca334a2d0c4d84eea2bb
|
[
"Apache-2.0"
] | 369
|
2017-01-27T14:40:37.000Z
|
2022-03-30T09:34:08.000Z
|
test/unit/outcome_table_decider.py
|
angelonakos/aws-build-accumulator
|
8768bb60dfda13d1f8b3ca334a2d0c4d84eea2bb
|
[
"Apache-2.0"
] | 68
|
2020-09-11T17:36:33.000Z
|
2022-03-30T18:14:45.000Z
|
test/unit/outcome_table_decider.py
|
angelonakos/aws-build-accumulator
|
8768bb60dfda13d1f8b3ca334a2d0c4d84eea2bb
|
[
"Apache-2.0"
] | 141
|
2017-01-12T06:19:22.000Z
|
2022-03-17T01:49:33.000Z
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import unittest
import unittest.mock
import lib.job_outcome
class CBMCNegativeTest(unittest.TestCase):
def setUp(self):
self.table = {
"comment": "",
"outcomes": [{
"type": "return-code",
"value": 0,
"action": "fail_ignored",
}, {
"type": "return-code",
"value": 10,
"action": "success",
}, {
"type": "wildcard",
"action": "fail",
}]
}
def assert_outcome_equals(self, jod, outcome):
self.assertEqual(jod.get_job_fields(), outcome)
def test_zero(self):
proc = unittest.mock.Mock()
proc.returncode = 0
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "fail_ignored",
"loaded_outcome_dict": self.table,
})
def test_success(self):
proc = unittest.mock.Mock()
proc.returncode = 10
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "success",
"loaded_outcome_dict": self.table,
})
class CBMCTest(unittest.TestCase):
def setUp(self):
self.table = {
"comment": "",
"outcomes": [{
"type": "return-code",
"value": 0,
"action": "success",
}, {
"type": "return-code",
"value": 10,
"action": "fail_ignored",
}, {
"type": "wildcard",
"action": "fail",
}]
}
def assert_outcome_equals(self, jod, outcome):
self.assertEqual(jod.get_job_fields(), outcome)
def test_success(self):
proc = unittest.mock.Mock()
proc.returncode = 0
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "success",
"loaded_outcome_dict": self.table,
})
def test_ten(self):
proc = unittest.mock.Mock()
proc.returncode = 10
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "fail_ignored",
"loaded_outcome_dict": self.table,
})
def test_one(self):
proc = unittest.mock.Mock()
proc.returncode = 1
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "fail",
"loaded_outcome_dict": self.table,
})
def test_timeout_one(self):
proc = unittest.mock.Mock()
proc.returncode = 1
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, True, True)
self.assert_outcome_equals(jod, {
"outcome": "fail",
"loaded_outcome_dict": self.table,
})
def test_timeout_ten(self):
proc = unittest.mock.Mock()
proc.returncode = 10
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, True, True)
self.assert_outcome_equals(jod, {
"outcome": "fail",
"loaded_outcome_dict": self.table,
})
def test_timeout_zero(self):
# This is sort of weird, in that we wouldn't expect a process to return
# 0 if it has been killed by a timeout. Nevertheless, failing is the right
# thing to do in that case.
proc = unittest.mock.Mock()
proc.returncode = 0
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, True, True)
self.assert_outcome_equals(jod, {
"outcome": "fail",
"loaded_outcome_dict": self.table,
})
class TimeoutPriorityTest(unittest.TestCase):
def setUp(self):
self.table = {
"comment": "",
"outcomes": [{
"type": "return-code",
"value": 0,
"action": "success",
}, {
"type": "return-code",
"value": 10,
"action": "fail_ignored",
}, {
"type": "timeout",
"action": "fail_ignored",
}, {
"type": "wildcard",
"action": "fail",
}]
}
def assert_outcome_equals(self, jod, outcome):
self.assertEqual(jod.get_job_fields(), outcome)
def test_success(self):
proc = unittest.mock.Mock()
proc.returncode = 0
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "success",
"loaded_outcome_dict": self.table,
})
def test_ten(self):
proc = unittest.mock.Mock()
proc.returncode = 10
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "fail_ignored",
"loaded_outcome_dict": self.table,
})
def test_one(self):
proc = unittest.mock.Mock()
proc.returncode = 1
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, False, True)
self.assert_outcome_equals(jod, {
"outcome": "fail",
"loaded_outcome_dict": self.table,
})
def test_timeout_one(self):
proc = unittest.mock.Mock()
proc.returncode = 1
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, True, True)
self.assert_outcome_equals(jod, {
"outcome": "fail_ignored",
"loaded_outcome_dict": self.table,
})
def test_timeout_ten(self):
proc = unittest.mock.Mock()
proc.returncode = 10
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, True, True)
self.assert_outcome_equals(jod, {
"outcome": "fail_ignored",
"loaded_outcome_dict": self.table,
})
def test_timeout_zero(self):
proc = unittest.mock.Mock()
proc.returncode = 0
jod = lib.job_outcome.OutcomeTableDecider(
self.table, proc.returncode, True, True)
self.assert_outcome_equals(jod, {
"outcome": "fail_ignored",
"loaded_outcome_dict": self.table,
})
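The tests above pin down the decider's behaviour without showing its implementation. The following is a minimal sketch consistent with those tests; the real class lives in lib/job_outcome.py, and the meaning of the constructor arguments (table, return_code, timeout_happened, table_loaded) is assumed from how the tests call it:

class OutcomeTableDecider:
    # Illustrative reimplementation inferred from the tests above;
    # not the actual aws-build-accumulator code.
    def __init__(self, table, return_code, timeout_happened, table_loaded):
        self.table = table
        self.outcome = "fail"
        for rule in table["outcomes"]:
            # A timeout rule wins whenever a timeout actually happened.
            if rule["type"] == "timeout" and timeout_happened:
                self.outcome = rule["action"]
                break
            # Return-code rules only apply to runs that did not time out.
            if (rule["type"] == "return-code" and not timeout_happened
                    and rule["value"] == return_code):
                self.outcome = rule["action"]
                break
            # Wildcard catches everything else, including timed-out runs
            # in tables that have no explicit timeout rule.
            if rule["type"] == "wildcard":
                self.outcome = rule["action"]
                break

    def get_job_fields(self):
        return {"outcome": self.outcome, "loaded_outcome_dict": self.table}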
| 28.704545
| 87
| 0.538797
| 759
| 7,578
| 5.229249
| 0.168643
| 0.070295
| 0.081381
| 0.070547
| 0.826405
| 0.826405
| 0.817082
| 0.817082
| 0.817082
| 0.817082
| 0
| 0.006688
| 0.348905
| 7,578
| 263
| 88
| 28.813688
| 0.79773
| 0.09488
| 0
| 0.941799
| 0
| 0
| 0.121292
| 0
| 0
| 0
| 0
| 0
| 0.10582
| 1
| 0.10582
| false
| 0
| 0.015873
| 0
| 0.137566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
762bc5d53a2848daa8641f24b9b4fcf4db70322f
| 15,845
|
py
|
Python
|
lightnion/path_selection.py
|
pthevenet/lightnion
|
c9e842d3c269d0d39fa62d68f7f83ffb17c5161e
|
[
"BSD-3-Clause"
] | 120
|
2019-02-24T20:34:17.000Z
|
2021-11-24T16:03:43.000Z
|
lightnion/path_selection.py
|
pthevenet/lightnion
|
c9e842d3c269d0d39fa62d68f7f83ffb17c5161e
|
[
"BSD-3-Clause"
] | 5
|
2020-01-20T11:45:41.000Z
|
2020-03-03T12:22:42.000Z
|
lightnion/path_selection.py
|
pthevenet/lightnion
|
c9e842d3c269d0d39fa62d68f7f83ffb17c5161e
|
[
"BSD-3-Clause"
] | 3
|
2019-03-18T21:24:43.000Z
|
2020-10-09T02:53:00.000Z
|
import random
import sys
import logging
import lightnion.descriptors as descriptors
# Chutney launches all relays in the same subnet.
# So to test the proxy with Chutney, these checks need to be disabled.
check_different_subnets = False
def select_path(routers, state, testing=False):
"""Handle the path selection
:params routers: list of the routers given by the consensus
:state:
:returns: updated state tuple (guard, middle, exit)"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
routers = [r for r in routers if obey_minimal_constraint(r)]
state, exit_node = pick_good_exit(routers, state)
state, guard = pick_good_entry(routers, state, exit_node, testing)
state, middle = pick_good_middle(routers, state, exit_node, guard, testing)
return state, guard, middle, exit_node
def select_guard_from_consensus(cons, descr, testing=False):
"""Handle the selection of the guard node
:param cons: the consensus, containing the list of routers
:param descr: dict of descriptors keyed by digest
:returns: the descriptor of the selected guard"""
routers = [r for r in cons['routers'] if obey_minimal_constraint(r)]
guard = pick_good_entry_from_routers(descr, routers, testing)
return guard
def select_end_path_from_consensus(cons, descr, guard, testing=False):
"""Handle the selection of the middle and exit nodes
:param cons: the consensus, containing the list of routers
:param descr: dict of descriptors keyed by digest
:param guard: the previously chosen guard node
:returns: tuple (middle, exit)"""
routers = [r for r in cons['routers'] if obey_minimal_constraint(r)]
exit_node = pick_good_exit_from_routers(descr, routers, guard)
middle = pick_good_middle_from_routers(descr, routers, exit_node, guard, testing)
return middle, exit_node
def obey_minimal_constraint(router, exit_node=None, guard=None, testing=False):
"""Checks if the given router respects the minimal constraints
:param testing: if True, skip the same-/16-subnet checks (used with Chutney)
:param router: router that must respect the constraints
:param exit_node: the chosen exit node
:param guard: the chosen guard node
:return: boolean"""
flags = router['flags']
router_address = router['address'].split(".")
# check that the router is running, valid and runs a recent Tor version
if 'Running' not in flags:
return False
if 'Valid' not in flags:
return False
if not router['version'].startswith('Tor 0.3.'):
return False
if exit_node is not None:
# check that they are distinct
if router['digest'] == exit_node['digest']:
return False
# check if they are in the same /16 subnet
if check_different_subnets:
exit_addr = exit_node['router']['address'].split(".")
if not testing and router_address[0] == exit_addr[0] and router_address[1] == exit_addr[1]:
return False
if guard is not None:
# check that they are distinct
if router['digest'] == guard['digest']:
return False
# check if they are in the same /16 subnet
if check_different_subnets:
guard_addr = guard['router']['address'].split(".")
if not testing and router_address[0] == guard_addr[0] and router_address[1] == guard_addr[1]:
return False
return True
def in_same_family(r0, r1, r2=None):
"""Check if r0 and r1 (and possibly r0 and r2) are in the same family or not
:param r0: the descriptor of the first router (the candidate being checked)
:param r1: the descriptor of the second router
:param r2: the descriptor of the third router (possibly none)
:return: a boolean"""
# check if r0 and r1 are in the same family
if 'family' in r0 and 'family' in r1:
for f in r0['family']:
if f in r1['family']:
return True
if r2 and 'family' in r0 and 'family' in r2:
for f in r0['family']:
if f in r2['family']:
return True
return False
def keep_exit(router, state):
"""Checks that the router is not a bad exit, is not down, is stable,
is valid, is not running an old Tor version, has an available ed25519
identity key and has an 'accept' exit policy
:param router: the router we want to check
:param state: the current state
:return: tuple (boolean that indicates if we keep it, new state, descriptor)"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
if not obey_minimal_constraint(router):
return False, state, None
if 'Exit' not in router['flags']:
return False, state, None
if 'BadExit' in router['flags']:
return False, state, None
# Retrieve the descriptor
state, nhop = descriptors.download(state, cons=router, flavor='unflavored')
if len(nhop) == 0:
return False, state, None
nhop = nhop[0]
if router['digest'] != nhop['digest']:
return False, state, None
if 'identity' not in nhop or nhop['identity']['type'] != 'ed25519':
return False, state, None
if 'policy' not in nhop:
return False, state, None
for rule in nhop['policy']['rules']:
if rule['pattern'] == "*:*" and rule['type'] == 'accept':
return True, state, nhop
return False, state, None
def keep_exit_with_descr(descr, router, guard):
"""Checks that the router is not a bad exit, is not down, is stable,
is valid, is not running an old Tor version, has an available ed25519
identity key and has an 'accept' exit policy
:param descr: dict of descriptors keyed by digest
:param router: the router we want to check
:param guard: the guard in the path
:return: tuple (boolean that indicates if we keep it, descriptor)"""
if not obey_minimal_constraint(router, guard=guard):
return False, None
if 'Exit' not in router['flags']:
return False, None
if 'BadExit' in router['flags']:
return False, None
if router['digest'] not in descr:
return False, None
nhop = descr[router['digest']]
if router['digest'] != nhop['digest']:
return False, None
if 'identity' not in nhop or nhop['identity']['type'] != 'ed25519':
return False, None
if 'policy' not in nhop:
return False, None
if in_same_family(nhop, guard):
return False, None
for rule in nhop['policy']['rules']:
if rule['pattern'] == "*:*" and rule['type'] == 'accept':
return True, nhop
return False, None
def weighted_random_choice(list_of_possible):
"""Choose one of the candidates at random weighted by their (avg) bandwidth
:param list_of_possible: list of descriptors of the candidates
:returns: descriptor of the randomly selected router"""
total = 0
for router in list_of_possible:
total += router['bandwidth']['avg']
r = random.uniform(0, total)
upto = 0
for router in list_of_possible:
if upto + router['bandwidth']['avg'] >= r:
return router
upto += router['bandwidth']['avg']
raise RuntimeError("")
def pick_good_exit(routers, state):
"""Choose the exit node
:param routers: the routers given by the consensus
:param state: the state
:return: the descriptor of the exit node"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
# Go through all routers and check if they meet the conditions of an exit node
candidates = []
for router in routers:
keep, state, descriptor = keep_exit(router, state)
if keep:
candidates.append(descriptor)
if candidates:
return state, weighted_random_choice(candidates)
else:
# TODO: see if we select another policy here
raise ValueError('No exit is suitable')
def pick_good_exit_from_routers(descr, routers, guard):
"""Choose the exit node
:param descr: dict of descriptors keyed by digest
:param routers: the routers given by the consensus
:param guard: the guard in the path
:return: the descriptor of the exit node"""
# Go through all routers and check if they meet the conditions of an exit node
candidates = []
for router in routers:
keep, descriptor = keep_exit_with_descr(descr, router, guard)
if keep:
candidates.append(descriptor)
if candidates:
return weighted_random_choice(candidates)
else:
# TODO: see if we select another policy here
raise ValueError('No exit is suitable')
def keep_guard(router, state, exit_node, testing):
"""Checks that the given router has the properties to be a guard
:param router: the router that must be inspected
:param state: state
:param exit_node: the previously chosen node
:return: if the router can be kept, the updated state and the descriptor"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
flags = router['flags']
if not obey_minimal_constraint(router, exit_node, testing=testing):
return False, state, None
if 'Guard' not in flags:
return False, state, None
if 'Stable' not in flags:
return False, state, None
if 'Fast' not in flags:
return False, state, None
if 'V2Dir' not in flags:
return False, state, None
# Retrieve the descriptor
state, nhop = descriptors.download(state, cons=router, flavor='unflavored')
if len(nhop) == 0:
return False, state, None
nhop = nhop[0]
if router['digest'] != nhop['digest']:
return False, state, None
if 'identity' not in nhop or nhop['identity']['type'] != 'ed25519':
return False, state, None
if in_same_family(nhop, exit_node):
return False, state, None
return True, state, nhop
def keep_guard_with_descr(descr, router, testing):
"""Checks that the given router has the properties to be a guard
:param descr: dict of descriptors keyed by digest
:param router: the router that must be inspected
:return: whether the router can be kept, and the matching descriptor"""
flags = router['flags']
if not obey_minimal_constraint(router, testing=testing):
return False, None
if 'Guard' not in flags:
return False, None
if 'Stable' not in flags:
return False, None
if 'Fast' not in flags:
return False, None
if 'V2Dir' not in flags:
return False, None
if router['digest'] not in descr:
return False, None
nhop = descr[router['digest']]
if router['digest'] != nhop['digest']:
return False, None
if 'identity' not in nhop or nhop['identity']['type'] != 'ed25519':
return False, None
return True, nhop
def pick_good_entry(routers, state, exit_node, testing):
"""Simplified version of the Guard selection algorithm
:param routers: the routers of the consensus
:param state: the state
:return: updated state and the descriptor of the guard node"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
candidates = []
for router in routers:
keep, state, descriptor = keep_guard(router, state, exit_node, testing)
if keep:
candidates.append(descriptor)
if candidates:
return state, weighted_random_choice(candidates)
else:
# TODO: see if we select another policy here
raise ValueError('No guard is suitable')
def pick_good_entry_from_routers(descr, routers, testing):
"""Simplified version of the Guard selection algorithm
:param descr: dict of descriptors keyed by digest
:param routers: the routers of the consensus
:return: the descriptor of the guard node"""
candidates = []
for router in routers:
keep, descriptor = keep_guard_with_descr(descr, router, testing)
if keep:
candidates.append(descriptor)
if candidates:
return weighted_random_choice(candidates)
else:
# TODO: see if we select another policy here
raise ValueError('No guard is suitable')
def keep_middle(router, state, exit_node, guard, testing):
"""Checks that the given router has the properties to be a middle node
:param router: the router that must be inspected
:param state: state
:param exit_node: the previously chosen exit node
:param guard: the previously chosen guard
:return: if the router can be kept, the updated state and the descriptor"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
if not obey_minimal_constraint(router, exit_node, guard, testing=testing):
return False, state, None
# Retrieve the descriptor
state, nhop = descriptors.download(state, cons=router, flavor='unflavored')
if len(nhop) == 0:
return False, state, None
nhop = nhop[0]
if router['digest'] != nhop['digest']:
return False, state, None
if 'identity' not in nhop or nhop['identity']['type'] != 'ed25519':
return False, state, None
if in_same_family(nhop, guard, exit_node):
return False, state, None
return True, state, nhop
def keep_middle_with_descr(descr, router, exit_node, guard, testing):
"""Checks that the given router has the properties to be a middle node
:param descr: dict of descriptors keyed by digest
:param router: the router that must be inspected
:param exit_node: the previously chosen exit node
:param guard: the previously chosen guard
:return: whether the router can be kept, and the matching descriptor"""
if not obey_minimal_constraint(router, exit_node, guard, testing=testing):
return False, None
if router['digest'] not in descr:
return False, None
nhop = descr[router['digest']]
if router['digest'] != nhop['digest']:
return False, None
if 'identity' not in nhop or nhop['identity']['type'] != 'ed25519':
return False, None
if in_same_family(nhop, guard, exit_node):
return False, None
return True, nhop
def pick_good_middle(routers, state, exit_node, guard, testing):
"""Choose the middle node given the exit and the guard node
:param routers: the routers of the consensus
:param state: the state
:param exit_node: the previously chosen exit node
:param guard: the previously chosen guard node
:return: updated state and the descriptor of the guard node"""
logging.warning('Use of DEPRECATED method %s()' % sys._getframe().f_code.co_name)
candidates = []
for router in routers:
keep, state, descriptor = keep_middle(router, state, exit_node, guard, testing)
if keep:
candidates.append(descriptor)
if candidates:
return state, weighted_random_choice(candidates)
else:
# TODO: see if we select another policy here
raise ValueError('No middle node is suitable')
def pick_good_middle_from_routers(descr, routers, exit_node, guard, testing):
"""Choose the middle node given the exit and the guard node
:param descr: dict of descriptors keyed by digest
:param routers: the routers of the consensus
:param exit_node: the previously chosen exit node
:param guard: the previously chosen guard node
:return: the descriptor of the middle node"""
candidates = []
for router in routers:
keep, descriptor = keep_middle_with_descr(descr, router, exit_node, guard, testing)
if keep:
candidates.append(descriptor)
if candidates:
return weighted_random_choice(candidates)
else:
# TODO: see if we select another policy here
raise ValueError('No middle node is suitable')
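Taken together, the non-deprecated entry points above compose into a three-hop Tor path. A usage sketch, assuming `cons` is a parsed consensus and `descr` a digest-keyed dict of descriptors, both produced elsewhere in lightnion:

# Illustrative composition of the selection functions defined above.
guard = select_guard_from_consensus(cons, descr)
middle, exit_node = select_end_path_from_consensus(cons, descr, guard)
path = [guard, middle, exit_node]  # guard -> middle -> exit, each a descriptor

Each hop is drawn by weighted_random_choice, so relays advertising a higher average bandwidth are proportionally more likely to be picked.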
| 31.69
| 123
| 0.662354
| 2,183
| 15,845
| 4.720568
| 0.087494
| 0.055507
| 0.034158
| 0.042698
| 0.837263
| 0.817661
| 0.79049
| 0.748957
| 0.675885
| 0.641048
| 0
| 0.007217
| 0.247965
| 15,845
| 499
| 124
| 31.753507
| 0.857586
| 0.3213
| 0
| 0.718367
| 0
| 0
| 0.095657
| 0
| 0
| 0
| 0
| 0.012024
| 0
| 1
| 0.073469
| false
| 0
| 0.016327
| 0
| 0.379592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
523c1e341a5204826fd89a4dda3bfe095e52581a
| 154
|
py
|
Python
|
todo/admin.py
|
jonpas/FERI-WebApps
|
27e4fa0e741af31ac9344c72cf7e8a5e6289725d
|
[
"MIT"
] | null | null | null |
todo/admin.py
|
jonpas/FERI-WebApps
|
27e4fa0e741af31ac9344c72cf7e8a5e6289725d
|
[
"MIT"
] | null | null | null |
todo/admin.py
|
jonpas/FERI-WebApps
|
27e4fa0e741af31ac9344c72cf7e8a5e6289725d
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
admin.site.register(models.Tag)
admin.site.register(models.List)
admin.site.register(models.Task)
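These three register() calls expose the Tag, List and Task models in the Django admin with default options. If per-model customisation were wanted, the idiomatic extension (hypothetical here, not in the original file) is a ModelAdmin subclass replacing the plain register call:

class TaskAdmin(admin.ModelAdmin):
    # Hypothetical customisation; real field names depend on models.Task.
    list_display = ("__str__",)

admin.site.register(models.Task, TaskAdmin)  # instead of admin.site.register(models.Task)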
| 19.25
| 32
| 0.805195
| 23
| 154
| 5.391304
| 0.478261
| 0.217742
| 0.41129
| 0.556452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084416
| 154
| 7
| 33
| 22
| 0.879433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
524129be14735599c90cd7cf94bbc7d92edceeb8
| 1,013
|
py
|
Python
|
my_life/lifeServerApiApp/App/DiaryService/order/diary_serializer.py
|
CLAY-zhao/MyLife
|
994e1f4b2cb20b0bd09edc95ea5ed0b09010a1e3
|
[
"bzip2-1.0.6"
] | null | null | null |
my_life/lifeServerApiApp/App/DiaryService/order/diary_serializer.py
|
CLAY-zhao/MyLife
|
994e1f4b2cb20b0bd09edc95ea5ed0b09010a1e3
|
[
"bzip2-1.0.6"
] | 1
|
2022-01-15T05:36:51.000Z
|
2022-01-15T05:36:51.000Z
|
my_life/lifeServerApiApp/App/DiaryService/order/diary_serializer.py
|
CLAY-zhao/MyLife
|
994e1f4b2cb20b0bd09edc95ea5ed0b09010a1e3
|
[
"bzip2-1.0.6"
] | null | null | null |
from rest_framework import serializers
class HomeDiarySerializer(serializers.Serializer):
# read_only=True: the field can be read but not written
id = serializers.IntegerField(read_only=True, label='id')
name = serializers.CharField(max_length=255, label='名称')
title = serializers.CharField()
comment = serializers.CharField()
date = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S')
classification = serializers.CharField(max_length=255, label='分类')
image = serializers.CharField(max_length=1024, label='image')
class SpecDiarySerializer(serializers.Serializer):
id = serializers.IntegerField(read_only=True, label='id')
name = serializers.CharField(max_length=255, label='名称')
title = serializers.CharField()
content = serializers.CharField()
comment = serializers.CharField()
date = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S')
classification = serializers.CharField(max_length=255, label='分类')
image = serializers.CharField(max_length=1024, label='image')
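A hypothetical usage sketch (none of this appears in the original file): plain DRF Serializer subclasses like these can serialize any object or mapping whose attributes or keys match the declared fields.

from datetime import datetime

entry = {
    'id': 1, 'name': 'my diary', 'title': 'hello', 'comment': 'first post',
    'date': datetime(2022, 1, 15, 5, 36, 51),
    'classification': 'life', 'image': 'cover.png',
}
# .data renders the declared fields; 'date' comes out as '2022-01-15 05:36:51'
# because of the format string on DateTimeField.
print(HomeDiarySerializer(instance=entry).data)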
| 38.961538
| 70
| 0.729516
| 116
| 1,013
| 6.284483
| 0.318966
| 0.301783
| 0.1893
| 0.238683
| 0.79561
| 0.79561
| 0.79561
| 0.79561
| 0.79561
| 0.79561
| 0
| 0.022857
| 0.136229
| 1,013
| 25
| 71
| 40.52
| 0.810286
| 0.023692
| 0
| 0.777778
| 0
| 0
| 0.056738
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.055556
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
529467e9c302cb7a582c26802fe3981b08831d31
| 524
|
py
|
Python
|
OpenGLCffi/GL/EXT/EXT/texture3D.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/EXT/texture3D.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/EXT/texture3D.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
from OpenGLCffi.GL import params
@params(api='gl', prms=['target', 'level', 'internalformat', 'width', 'height', 'depth', 'border', 'format', 'type', 'pixels'])
def glTexImage3DEXT(target, level, internalformat, width, height, depth, border, format, type, pixels):
pass
@params(api='gl', prms=['target', 'level', 'xoffset', 'yoffset', 'zoffset', 'width', 'height', 'depth', 'format', 'type', 'pixels'])
def glTexSubImage3DEXT(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, pixels):
pass
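Both wrappers are stubs whose behaviour is driven entirely by the @params decorator, whose real definition lives elsewhere in OpenGLCffi. Purely as an illustration of the pattern, not the library's actual code, such a parametrised decorator can be written as:

def params(api, prms):
    # Attach dispatch metadata to the stub so an FFI layer could later
    # build the real call; illustrative only.
    def wrap(func):
        func.api = api
        func.prms = prms
        return func
    return wrap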
| 43.666667
| 132
| 0.685115
| 61
| 524
| 5.885246
| 0.393443
| 0.122563
| 0.178273
| 0.083565
| 0.791086
| 0.791086
| 0.707521
| 0.707521
| 0.707521
| 0.707521
| 0
| 0.004292
| 0.110687
| 524
| 11
| 133
| 47.636364
| 0.766094
| 0
| 0
| 0.285714
| 0
| 0
| 0.250958
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.285714
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
52973b676bf8441632180263b05b22b443f4a4d0
| 14,172
|
py
|
Python
|
gwide/Classes/HittableClass.py
|
tturowski/gwide
|
32ab0168fba15fa1ad904639057d90014448f821
|
[
"Apache-2.0"
] | 7
|
2015-11-06T16:18:12.000Z
|
2021-04-16T09:43:50.000Z
|
gwide/Classes/HittableClass.py
|
tturowski/gwide
|
32ab0168fba15fa1ad904639057d90014448f821
|
[
"Apache-2.0"
] | null | null | null |
gwide/Classes/HittableClass.py
|
tturowski/gwide
|
32ab0168fba15fa1ad904639057d90014448f821
|
[
"Apache-2.0"
] | 1
|
2020-06-26T18:58:48.000Z
|
2020-06-26T18:58:48.000Z
|
#!/usr/bin/env python
import os, re, math, sys
import pandas as pd
import gwide.methods as gtk
import matplotlib.pyplot as plt
class HittableClass():
def __init__(self, gtf, whole_name, n_rpM, out_prefix, read_stdin):
self.gtf = gtf
self.whole_name = whole_name
self.n_rpM = n_rpM
if out_prefix:
self.out_prefix = out_prefix+'_'
else:
self.out_prefix = str()
self.read_stdin = read_stdin
def correlation(self, output, gene_class, use_RPKM=False):
print "# Calculate correlation is running..."
if use_RPKM == True: self.out_prefix = self.out_prefix + 'RPKM_'
no_of_reads = dict()
genes_name = list()
normalizator = float()
classes = list()
paths = gtk.list_paths_in_current_dir('hittable_reads.txt', stdin=self.read_stdin) #get paths of hittables
experiments, paths = gtk.define_experiments(paths_in=paths, whole_name=self.whole_name) #extract experiments from paths
data = pd.DataFrame(columns=[['gene_name', 'gene_id', 'type']+experiments]) # initialize Pandas DataFrame
print "# Currently reading GTF file: "+ self.gtf
#reading gtf line by line
for line in open(self.gtf, 'r'):
if not line.startswith('#'):
line_elements = line.strip().split('\t')
type = str(line_elements[1])
if type not in classes:
classes.append(type)
try:
gene_name = re.search("gene_name\s\"(.*?)\"", str(line_elements[8])).group(1)
except:
gene_name = re.search("gene_id\s\"(.*?)\"", str(line_elements[8])).group(1) # when there is no gene name
gene_id = re.search("gene_id\s\"(.*?)\"", str(line_elements[8])).group(1)
if gene_name not in genes_name:
genes_name.append(gene_name)
gene_data = pd.DataFrame([[gene_name, gene_id, type]+([0]*len(experiments))], columns=(['gene_name', 'gene_id', 'type']+experiments))
data = data.append(gene_data, ignore_index=True)
data = data.set_index(['gene_name'])
#create file with no of reads
no_of_reads_file = open(self.out_prefix+"no_of_reads.table",'w')
no_of_reads_file.write("# experiment"+'\t'+"mapped_reads"+'\t'+"total_reads"+'\n')
#filling dataframe and close no_of_reads_file
for path_no, path in enumerate(paths):
name = experiments[path_no]
no_of_reads[name] = dict()
print "# Currently reading: "+path+" ..."
for line in open(path, 'r'):
if line.startswith('# total number of reads') and not line.startswith('# total number of reads without'):
total_reads = int(filter(str.isdigit, line)) # no of reads
no_of_reads[name]['total_reads'] = total_reads
if line.startswith('# total number of single reads'):
total_mapped_reads = int(filter(str.isdigit, line)) # no of mapped reads
no_of_reads[name]['total_mapped_reads'] = total_mapped_reads
normalizator = 1000000.0/total_mapped_reads
if not line.startswith('#'):
line_elements = line.strip().split('\t')
if len(line_elements) == 4:
gene_name, hits = line_elements[0], float(line_elements[1])
# print gene_name
data.loc[gene_name, name] = float(math.ceil(float(hits*normalizator)))
elif len(line_elements) == 6:
gene_name, hits, RPKM = line_elements[0], float(line_elements[1]), float(line_elements[2])
# print gene_name
if use_RPKM == False: data.loc[gene_name, name] = float(math.ceil(float(hits * normalizator)))
else: data.loc[gene_name, name] = float(RPKM)
no_of_reads_file.write(name+'\t'+str(no_of_reads[name]['total_mapped_reads'])+'\t'+str(no_of_reads[name]['total_reads'])+'\n')
no_of_reads_file.close()
corr_dict = {"p" : "pearson" , "k" : "kendall" , "s" : "spearman"}
if output == 'a':
print "# Calculating all correlations..."
for i in corr_dict:
print "# Calculating correlations("+corr_dict[i]+")..."
matrix = data.corr(method=corr_dict[i],min_periods=1)
matrix.to_csv(self.out_prefix+"genome_wide_correlation_"+corr_dict[i]+".table", sep='\t')
#calculate Pearson for different types
if gene_class == True:
for this_type in classes:
new_data = data[data.type == this_type]
matrix = new_data.corr(method=corr_dict[i],min_periods=1)
matrix.to_csv(self.out_prefix+this_type+"_correlation_"+corr_dict[i]+".table", sep='\t')
else:
print "# Calculating correlations("+corr_dict[output]+")..."
matrix = data.corr(method=corr_dict[output],min_periods=1)
matrix.to_csv(self.out_prefix+"genome_wide_correlation_"+corr_dict[output]+".table", sep='\t')
#calculate Pearson for different types
if gene_class == True:
for this_type in classes:
new_data = data[data.type == this_type]
matrix = new_data.corr(method=corr_dict[output],min_periods=1)
matrix.to_csv(self.out_prefix+this_type+"_correlation_"+corr_dict[output]+".table", sep='\t')
def count(self, normalize=True, use_RPKM=False):
no_of_reads = dict()
genes_name = list()
normalizator = float()
if normalize==True: self.out_prefix = self.out_prefix+'rpM_'
if use_RPKM == True: self.out_prefix = self.out_prefix + 'RPKM_'
paths = gtk.list_paths_in_current_dir('hittable_reads.txt', stdin=self.read_stdin) #get paths of hittables
experiments, paths = gtk.define_experiments(paths_in=paths, whole_name=self.whole_name) #extract experiments from paths
data = pd.DataFrame(columns=[['gene_name', 'gene_id', 'type']+experiments]) # initialize Pandas DataFrame
#reading gtf file
print "Currently reading GTF file: "+self.gtf
for line in open(self.gtf, 'r'):
if not line.startswith('#'):
line_elements = line.strip().split('\t')
type = str(line_elements[1])
try:
gene_name = re.search("gene_name\s\"(.*?)\"", str(line_elements[8])).group(1)
except:
gene_name = re.search("gene_id\s\"(.*?)\"", str(line_elements[8])).group(1) # when there is no gene name
print "No gene name in GTF file! Used gene id: "+gene_name+" as gene name."
gene_id = re.search("gene_id\s\"(.*?)\"", str(line_elements[8])).group(1)
if gene_name not in genes_name:
genes_name.append(gene_name)
gene_data = pd.DataFrame([[gene_name, gene_id, type]+([0]*len(experiments))], columns=(['gene_name', 'gene_id', 'type']+experiments))
data = data.append(gene_data, ignore_index=True)
data = data.set_index(['gene_name'])
#filling dataframe
for path_no, path in enumerate(paths):
name = experiments[path_no]
print "Currently reading: "+path+"..."
for line in open(path, 'r'):
if line.startswith('# total number of single reads'):
total_mapped_reads = int(filter(str.isdigit, line)) # no of mapped reads
no_of_reads[name] = total_mapped_reads
if normalize == True: normalizator = 1000000.0/total_mapped_reads
else: normalizator = 1.0
if not line.startswith('#'):
line_elements = line.strip().split('\t')
if len(line_elements) == 4:
gene_name, hits = line_elements[0], float(line_elements[1])
# print gene_name
data.loc[gene_name, name] = float(math.ceil(float(hits*normalizator)))
elif len(line_elements) == 6:
gene_name, hits, RPKM = line_elements[0], float(line_elements[1]), float(line_elements[2])
# print gene_name
                if not use_RPKM:
                    data.loc[gene_name, name] = float(math.ceil(hits * normalizator))
                else:
                    data.loc[gene_name, name] = float(RPKM)
        print("Creating output.tab file...")
        data.to_csv(self.out_prefix + 'output.tab', sep='\t')

    def plot(self, print_single):
        paths = gtk.list_paths_in_current_dir('hittable_reads.txt', stdin=self.read_stdin)  # get paths of hittables
        experiments, paths = gtk.define_experiments(paths_in=paths, whole_name=self.whole_name)  # extract experiments from paths
        # initiating DataFrame; a flat column list avoids the accidental
        # MultiIndex that the original nested list would create
        data = pd.DataFrame(columns=['group', 'legend'] + experiments)
        data = data.set_index('group')
        general = dict()
        # filling DataFrame
        for path_no, hittable in enumerate(paths):
            name = experiments[path_no]
            general[name] = list()  # [total_mapped_reads, total_reads]
            for line in open(hittable):
                if line.startswith('# total number of mapped reads:'):
                    line_elements = line.strip().split('\t')
                    total_mapped_reads = int(line_elements[1])
                    general[name].append(total_mapped_reads)
                if line.startswith('# total number of reads') and not line.startswith('# total number of reads without'):
                    line_elements = line.strip().split('\t')
                    total_reads = int(line_elements[1])
                    general[name].append(total_reads)
                if line.startswith('##'):
                    line_elements = line.strip().split('\t')
                    type_of_reads = str(line_elements[0].strip('#').strip())
                    no_of_reads = int(line_elements[1])
                    data.loc[type_of_reads, name] = no_of_reads
        data = data.fillna(0)
        print(data)
        colors = ['lightblue', 'yellowgreen', 'darkred', 'gold',
                  'white', 'lightcoral', 'blue', 'pink', 'darkgreen',
                  'yellow', 'grey', 'violet', 'magenta', 'cyan']
        if not print_single:
            fig = plt.figure(figsize=(12, 9), dpi=100, facecolor='w', edgecolor='k')
            fig_no = 1
            plot_no = 1
            fig.add_subplot(3, 3, plot_no)
            plt.title('Legend:')
            plt.pie(data[experiments[0]], colors=colors, autopct='%1.1f%%', labeldistance=1.1, startangle=90)
            plt.legend(data.index, loc=0)
            for e in experiments:
                plot_no += 1
                fig.add_subplot(3, 3, plot_no)
                plt.pie(data[e], colors=colors, autopct='%1.1f%%', labeldistance=1.1, startangle=90)
                plt.axis('equal')
                plt.tight_layout()
                plt.title(e)
                # general[e] holds [total_mapped_reads, total_reads], so the
                # indices below follow the 'mapped reads/total reads' label
                plt.text(0, -0.9, 'mapped reads/total reads: \n' + str(general[e][0]) + '/' + str(general[e][1]),
                         fontsize=12, horizontalalignment='center')
                if plot_no == 9:
                    plt.savefig('piecharts_' + str(fig_no) + '.png')
                    fig_no += 1
                    plt.clf()
                    plot_no = 0
            if plot_no > 0:
                plt.savefig('piecharts_' + str(fig_no) + '.png')
                plt.clf()
        else:
            print("Plotting piecharts in separate files...")
            for e in experiments:
                plt.pie(data[e], colors=colors, autopct='%1.1f%%', labeldistance=1.1, startangle=90)
                plt.legend(data.index, loc=4)
                plt.axis('equal')
                plt.tight_layout()
                plt.title(e)
                plt.text(-0.9, -0.9, 'mapped reads/total reads: \n' + str(general[e][0]) + '/' + str(general[e][1]),
                         fontsize=12, horizontalalignment='center')
                plt.savefig(e + '.png')
                plt.clf()
        print('Done.')

    def classes_to_tab(self):
        paths = gtk.list_paths_in_current_dir('hittable_reads.txt', stdin=self.read_stdin)  # get paths of hittables
        experiments, paths = gtk.define_experiments(paths_in=paths, whole_name=self.whole_name)  # extract experiments from paths
        # initiating DataFrame (flat column list, as in plot())
        data = pd.DataFrame(columns=['group', 'legend'] + experiments)
        data = data.set_index('group')
        general = dict()
        # filling DataFrame
        for path_no, hittable in enumerate(paths):
            name = experiments[path_no]
            general[name] = list()  # [total_mapped_reads, total_reads]
            for line in open(hittable):
                if line.startswith('# total number of mapped reads:'):
                    line_elements = line.strip().split('\t')
                    total_mapped_reads = int(line_elements[1])
                    general[name].append(total_mapped_reads)
                if line.startswith('# total number of reads') and not line.startswith('# total number of reads without'):
                    line_elements = line.strip().split('\t')
                    total_reads = int(line_elements[1])
                    general[name].append(total_reads)
                if line.startswith('##'):
                    line_elements = line.strip().split('\t')
                    type_of_reads = str(line_elements[0].strip('#').strip())
                    no_of_reads = int(line_elements[1])
                    data.loc[type_of_reads, name] = no_of_reads
        data = data.fillna(0)
        data.to_csv('classes.tab', sep='\t')
        print('Done.')
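The two methods above belong to a larger class, not shown in this excerpt, that carries the read_stdin, whole_name and out_prefix attributes they reference. A minimal driver sketch, with the class name and constructor signature invented purely for illustration:

# hypothetical usage; HittableTools and its constructor are illustrative only
tools = HittableTools(read_stdin=False, whole_name=False, out_prefix='run1_')
tools.plot(print_single=False)  # grid of pie charts, nine panels per figure
tools.classes_to_tab()          # writes the read-class counts to classes.tab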
| 52.880597
| 153
| 0.559836
| 1,711
| 14,172
| 4.435418
| 0.125073
| 0.063249
| 0.023719
| 0.027672
| 0.826591
| 0.804322
| 0.786533
| 0.753327
| 0.749111
| 0.737778
| 0
| 0.011789
| 0.311671
| 14,172
| 268
| 154
| 52.880597
| 0.766171
| 0.060471
| 0
| 0.633929
| 0
| 0
| 0.111304
| 0.003615
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.017857
| null | null | 0.075893
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8704196f0bfc8d94f14be529b50fafc7d897826c
| 21,934
|
py
|
Python
|
tests/forum/comments/test_comments.py
|
kozzztik/tulius
|
81b8f6484eefdc453047f62173a08f5e6f640cd6
|
[
"MIT"
] | 1
|
2020-04-21T15:09:18.000Z
|
2020-04-21T15:09:18.000Z
|
tests/forum/comments/test_comments.py
|
kozzztik/tulius
|
81b8f6484eefdc453047f62173a08f5e6f640cd6
|
[
"MIT"
] | 70
|
2019-04-10T22:32:32.000Z
|
2022-03-11T23:12:54.000Z
|
tests/forum/comments/test_comments.py
|
kozzztik/tulius
|
81b8f6484eefdc453047f62173a08f5e6f640cd6
|
[
"MIT"
] | 1
|
2019-04-12T14:55:39.000Z
|
2019-04-12T14:55:39.000Z
|
import pytest
from tulius.forum.threads import models
from tulius.forum.comments import signals
def test_comments_api(client, superuser, admin, user):
# create root room and thread in it
response = superuser.put(
'/api/forum/', {
'title': 'group', 'body': 'group description',
'room': True, 'default_rights': None, 'granted_rights': []})
assert response.status_code == 200
group = response.json()
response = admin.put(
group['url'], {
'title': 'thread', 'body': 'thread description',
'room': False, 'default_rights': models.NO_ACCESS,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread = response.json()
assert thread['first_comment_id'] is not None
# check how thread looks on room page
response = admin.get(group['url'])
assert response.status_code == 200
data = response.json()
assert data['threads'][0]['comments_count'] == 1
last_comment = data['threads'][0]['last_comment']
assert last_comment['id'] == thread['first_comment_id']
# check comments are not readable by other users
response = user.get(thread['url'] + 'comments_page/')
assert response.status_code == 403
# make thread readable
response = admin.put(
thread['url'] + 'granted_rights/', {
'default_rights': models.ACCESS_READ
})
assert response.status_code == 200
# check the user can now read comments
response = user.get(thread['url'] + 'comments_page/')
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 1
first_comment = data['comments'][0]
assert first_comment['title'] == 'thread'
assert first_comment['body'] == 'thread description'
assert first_comment['is_thread']
assert not first_comment['edit_right']
assert first_comment['id'] == thread['first_comment_id']
# check that user can't post comments
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': first_comment['id'],
'title': 'hello', 'body': 'world',
'media': {},
})
assert response.status_code == 403
# make the thread open
response = admin.put(
thread['url'] + 'granted_rights/', {'default_rights': None})
assert response.status_code == 200
# check comment preview works
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': first_comment['id'],
'title': 'hello', 'body': 'world',
'media': {}, 'preview': True,
})
assert response.status_code == 200
data = response.json()
assert data['id'] is None
assert data['user']['id'] == user.user.pk
assert data['title'] == 'hello'
assert data['body'] == 'world'
# check that the comment was really not created
response = user.get(thread['url'] + 'comments_page/')
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 1
# now really post comment
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': first_comment['id'],
'title': 'hello', 'body': 'world',
'media': {},
})
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 2
comment = data['comments'][1]
assert comment['id']
assert comment['user']['id'] == user.user.pk
assert comment['title'] == 'hello'
assert comment['body'] == 'world'
# check anonymous users can't post comments
response = client.post(
thread['url'] + 'comments_page/', {
'reply_id': first_comment['id'],
'title': 'hello', 'body': 'world',
'media': {},
})
assert response.status_code == 403
# check how thread looks on room page
response = admin.get(group['url'])
assert response.status_code == 200
data = response.json()
assert data['threads'][0]['comments_count'] == 2
last_comment = data['threads'][0]['last_comment']
assert last_comment['id'] == comment['id']
# check user can update his comment
response = user.post(
comment['url'], {
'reply_id': first_comment['id'],
'title': 'hello world', 'body': 'world is great',
'media': {},
})
assert response.status_code == 200
data = response.json()
assert data['id'] == comment['id']
assert data['title'] == 'hello world'
assert data['body'] == 'world is great'
# check it is really updated
response = user.get(comment['url'])
assert response.status_code == 200
data = response.json()
assert data['id'] == comment['id']
assert data['title'] == 'hello world'
assert data['body'] == 'world is great'
# delete comment
response = user.delete(comment['url'] + '?comment=wow')
assert response.status_code == 200
# check it is deleted
response = user.get(thread['url'] + 'comments_page/')
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 1
assert data['comments'][0]['id'] == first_comment['id']
# check how thread looks on room page
response = admin.get(group['url'])
assert response.status_code == 200
data = response.json()
assert data['threads'][0]['comments_count'] == 1
last_comment = data['threads'][0]['last_comment']
assert last_comment['id'] == thread['first_comment_id']
# check we can't delete first comment
response = superuser.delete(first_comment['url'] + '?comment=wow')
assert response.status_code == 403
# add comment by admin
response = admin.post(
thread['url'] + 'comments_page/', {
'reply_id': first_comment['id'],
'title': 'Im admin', 'body': 'my comment is awesome',
'media': {},
})
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 2
admin_comment = data['comments'][1]
# check user can't delete it
response = user.delete(admin_comment['url'] + '?comment=wow')
assert response.status_code == 403
# check user can't update it
response = user.post(
admin_comment['url'], {
'reply_id': first_comment['id'],
'title': 'hello world', 'body': 'world is great',
'media': {},
})
assert response.status_code == 403
# check comments readable by anonymous user
response = client.get(thread['url'] + 'comments_page/')
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 2
# check we can't update first comment as comment
response = admin.post(
first_comment['url'], {
'reply_id': first_comment['id'],
'title': 'hello world', 'body': 'world is great',
'media': {},
})
assert response.status_code == 403
# check update comment preview
response = admin.post(
admin_comment['url'], {
'reply_id': first_comment['id'],
'title': 'hello world', 'body': 'world is great',
'media': {}, 'preview': True
})
assert response.status_code == 200
data = response.json()
assert data['id'] == admin_comment['id']
assert data['title'] == 'hello world'
assert data['body'] == 'world is great'
# check it is not really updated
response = admin.get(admin_comment['url'])
assert response.status_code == 200
data = response.json()
assert data['title'] == 'Im admin'
assert data['body'] == 'my comment is awesome'
# check we can't reply to comment in other thread
response = admin.put(
group['url'], {
'title': 'thread2', 'body': 'thread2 description',
'room': False, 'default_rights': None,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread2 = response.json()
response = admin.post(
thread['url'] + 'comments_page/', {
'reply_id': thread2['first_comment_id'],
'title': 'Im admin2', 'body': 'my comment is awesome2',
'media': {},
})
assert response.status_code == 403
# check comment without body is not added
response = admin.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'Im admin2', 'body': '',
'media': {},
})
assert response.status_code == 200
data = response.json()
assert len(data['comments']) == 2
def test_broken_last_comment(room_group, thread, user):
# check the last comment is in place
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
last_comment = data['threads'][0]['last_comment']
assert last_comment['id'] == thread['first_comment_id']
# break it
obj = models.Thread.objects.get(pk=thread['id'])
obj.data['last_comment']['all'] += 1
obj.save()
# check it does not break the original view
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert 'last_comment' not in data['threads'][0]
def _my_receiver(comment, **_kwargs):
comment.media['bar'] = 'foo'
return True
def test_after_update_saves_comment(thread, user):
# attach the "fix" receiver
signals.after_add.connect(_my_receiver)
try:
response = user.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'ho ho ho', 'body': 'happy new year',
'media': {},
})
finally:
assert signals.after_add.disconnect(_my_receiver)
assert response.status_code == 200
data = response.json()
response = user.get(data['comments'][1]['url'])
assert response.status_code == 200
comment = response.json()
assert comment['media']['bar'] == 'foo'
def test_comment_counters_on_rights_change(room_group, admin, client):
# Create room in root room
response = admin.put(
room_group['url'], {
'title': 'room1', 'body': 'room1 description',
'room': True, 'default_rights': None,
'granted_rights': []})
assert response.status_code == 200
room = response.json()
# create thread
response = admin.put(
room['url'], {
'title': 'thread1', 'body': 'thread1 description',
'room': False, 'default_rights': None,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread = response.json()
# check initial state
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
response = client.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
# close thread
response = admin.put(
thread['url'] + 'granted_rights/', {
'default_rights': models.NO_ACCESS})
assert response.status_code == 200
# check counters; the admin can still see the thread
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id']
assert data['rooms'][0]['comments_count'] == 1
# but the anonymous user cannot
response = client.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert 'last_comment' not in data['rooms'][0]
assert data['rooms'][0]['comments_count'] == 0
def test_comment_counters_on_rights_combination(room_group, admin, user):
# Create room in root room
response = admin.put(
room_group['url'], {
'title': 'room1', 'body': 'room1 description',
'room': True, 'default_rights': None,
'granted_rights': []})
assert response.status_code == 200
room = response.json()
# create thread1 - closed
response = admin.put(
room['url'], {
'title': 'thread1', 'body': 'thread1 description',
'room': False, 'default_rights': models.NO_ACCESS,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread1 = response.json()
# check state
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id'] == \
thread1['first_comment_id']
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert 'last_comment' not in data['rooms'][0]
# add opened thread
response = admin.put(
room['url'], {
'title': 'thread2', 'body': 'thread1 description',
'room': False, 'default_rights': None,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread2 = response.json()
# check it now
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id'] == \
thread2['first_comment_id']
assert data['rooms'][0]['comments_count'] == 2
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id'] == \
thread2['first_comment_id']
assert data['rooms'][0]['comments_count'] == 1
# grant rights
response = admin.post(
thread1['url'] + 'granted_rights/',
{
'user': {'id': user.user.pk},
'access_level': models.ACCESS_READ
})
assert response.status_code == 200
# counters fixed correctly
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['last_comment']['id'] == \
thread2['first_comment_id']
assert data['rooms'][0]['comments_count'] == 2
def test_thread_ordering_by_last_comment(room_group, admin):
# create thread 1
response = admin.put(
room_group['url'], {
'title': 'thread1', 'body': 'thread1 description',
'room': False, 'default_rights': None,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread1 = response.json()
# create thread 2
response = admin.put(
room_group['url'], {
'title': 'thread2', 'body': 'thread2 description',
'room': False, 'default_rights': None,
'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread2 = response.json()
# check ordering
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert len(data['threads']) == 2
assert data['threads'][0]['id'] == thread2['id']
assert data['threads'][1]['id'] == thread1['id']
# post comment to thread 1
response = admin.post(
thread1['url'] + 'comments_page/', {
'reply_id': thread1['first_comment_id'],
'title': 'ho ho ho', 'body': 'happy new year',
'media': {},
})
assert response.status_code == 200
# check that it now goes first
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert len(data['threads']) == 2
assert data['threads'][0]['id'] == thread1['id']
assert data['threads'][1]['id'] == thread2['id']
@pytest.mark.parametrize('default_rights', [models.NO_ACCESS, None])
def test_fix_counters_public_thread_and_empty_room(
superuser, room_group, user, default_rights):
# create public thread
response = superuser.put(
room_group['url'], {
'title': 'thread', 'body': 'thread description',
'room': False, 'default_rights': None, 'important': 'False',
'granted_rights': [], 'media': {}})
assert response.status_code == 200
# create room with no comments
response = superuser.put(
room_group['url'], {
'title': 'room', 'body': 'room description',
'room': True, 'default_rights': default_rights,
'granted_rights': [{
'user': {'id': user.user.pk},
'access_level': models.ACCESS_READ}]})
assert response.status_code == 200
# fix_counters
response = superuser.post(room_group['url'] + 'fix/')
assert response.status_code == 200
data = response.json()
assert data['result']['threads'] == 3
def test_fix_counters_public_room_in_middle(
admin, room_group, user, superuser):
# create public room
response = admin.put(
room_group['url'], {
'title': 'room', 'body': 'room description',
'room': True, 'default_rights': None, 'granted_rights': []})
assert response.status_code == 200
room = response.json()
# create hidden thread
response = admin.put(
room['url'], {
'title': 'thread', 'body': 'thread description',
'room': False, 'default_rights': models.NO_ACCESS,
'important': False, 'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread = response.json()
# check counters initial state for simple user
response = user.get('/api/forum/')
assert response.status_code == 200
data = response.json()
data = {r['id']: r for r in data['groups']}[room_group['id']]
assert data['rooms'][0]['comments_count'] == 0
assert 'last_comment' not in data['rooms'][0]
# check first comment id
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['first_comment_id'] is None
# check counters initial state for admin user
response = admin.get('/api/forum/')
assert response.status_code == 200
data = response.json()
data = {r['id']: r for r in data['groups']}[room_group['id']]
assert data['rooms'][0]['comments_count'] == 1
assert data['rooms'][0]['last_comment']['id'] == thread['first_comment_id']
# check first comment id
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['first_comment_id'] == thread['first_comment_id']
# fix_counters
response = superuser.post(room_group['url'] + 'fix/')
assert response.status_code == 200
data = response.json()
assert data['result']['threads'] == 3
# check counters for simple user
response = user.get('/api/forum/')
assert response.status_code == 200
data = response.json()
data = {r['id']: r for r in data['groups']}[room_group['id']]
assert data['rooms'][0]['comments_count'] == 0
assert 'last_comment' not in data['rooms'][0]
# check first comment id
response = user.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['first_comment_id'] is None
# check counters for admin
response = admin.get('/api/forum/')
assert response.status_code == 200
data = response.json()
data = {r['id']: r for r in data['groups']}[room_group['id']]
assert data['rooms'][0]['comments_count'] == 1
assert data['rooms'][0]['last_comment']['id'] == thread['first_comment_id']
# check first comment id
response = admin.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['first_comment_id'] == thread['first_comment_id']
def test_comments_superuser_counters(superuser, room_group, user):
# create room with no comments
response = user.put(
room_group['url'], {
'title': 'room', 'body': 'room description',
'room': True, 'default_rights': None, 'granted_rights': []})
assert response.status_code == 200
room = response.json()
# create hidden thread
response = user.put(
room['url'], {
'title': 'thread', 'body': 'thread description',
'room': False, 'default_rights': models.NO_ACCESS,
'important': False, 'granted_rights': [], 'media': {}})
assert response.status_code == 200
thread = response.json()
# check counters by super user
response = superuser.get(room_group['url'])
assert response.status_code == 200
data = response.json()
assert data['rooms'][0]['comments_count'] == 1
assert data['rooms'][0]['last_comment']['id'] == thread['first_comment_id']
def test_closed_thread(superuser, room_group):
# create thread
response = superuser.put(
room_group['url'], {
'title': 'thread', 'body': 'thread description',
'room': False, 'default_rights': None, 'granted_rights': [],
'important': True, 'closed': True, 'media': {}})
assert response.status_code == 200
thread = response.json()
assert thread['rights']['write']
# check add comment
response = superuser.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'ho ho ho', 'body': 'happy new year',
'media': {},
})
assert response.status_code == 200
# close thread
thread['closed'] = True
response = superuser.post(thread['url'], thread)
assert response.status_code == 200
thread = response.json()
assert not thread['rights']['write']
response = superuser.post(
thread['url'] + 'comments_page/', {
'reply_id': thread['first_comment_id'],
'title': 'ho ho ho', 'body': 'happy new year',
'media': {},
})
assert response.status_code == 403
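The fixtures used above (client, superuser, admin, user, room_group, thread) are defined elsewhere in the suite, presumably in a conftest.py. A minimal sketch of what room_group and thread could provide, inferred from the request patterns at the top of test_comments_api; the actual fixtures in kozzztik/tulius may differ:

import pytest

@pytest.fixture
def room_group(superuser):
    # a root room, mirroring the setup at the top of test_comments_api
    response = superuser.put(
        '/api/forum/', {
            'title': 'group', 'body': 'group description',
            'room': True, 'default_rights': None, 'granted_rights': []})
    assert response.status_code == 200
    return response.json()

@pytest.fixture
def thread(room_group, admin):
    # an open thread inside the root room
    response = admin.put(
        room_group['url'], {
            'title': 'thread', 'body': 'thread description',
            'room': False, 'default_rights': None,
            'granted_rights': [], 'media': {}})
    assert response.status_code == 200
    return response.json()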
| 37.817241
| 79
| 0.600802
| 2,585
| 21,934
| 4.953965
| 0.06499
| 0.081993
| 0.117133
| 0.140559
| 0.802202
| 0.780337
| 0.752538
| 0.726769
| 0.714977
| 0.693113
| 0
| 0.02016
| 0.242409
| 21,934
| 579
| 80
| 37.882556
| 0.750496
| 0.081198
| 0
| 0.801653
| 0
| 0
| 0.205597
| 0
| 0
| 0
| 0
| 0
| 0.326446
| 1
| 0.022727
| false
| 0
| 0.014463
| 0
| 0.039256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8724c00d286dceb76669b1e4dfaefc59ded98c4b
| 43
|
py
|
Python
|
svd/__init__.py
|
t-brandt/acorns-adi
|
6645fae7878a1801beeda0c6604b01e61f37ca15
|
[
"BSD-2-Clause"
] | 1
|
2016-10-30T16:29:51.000Z
|
2016-10-30T16:29:51.000Z
|
svd/__init__.py
|
t-brandt/acorns-adi
|
6645fae7878a1801beeda0c6604b01e61f37ca15
|
[
"BSD-2-Clause"
] | null | null | null |
svd/__init__.py
|
t-brandt/acorns-adi
|
6645fae7878a1801beeda0c6604b01e61f37ca15
|
[
"BSD-2-Clause"
] | null | null | null |
from stochastic_svd import stochastic_svd
| 14.333333
| 41
| 0.883721
| 6
| 43
| 6
| 0.666667
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 43
| 2
| 42
| 21.5
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8744944593aaded956e163f09500e7803c88cdf4
| 64
|
py
|
Python
|
python/testData/psi/PatternMatchingRecoveryIncompleteOrPatterns.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/psi/PatternMatchingRecoveryIncompleteOrPatterns.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
python/testData/psi/PatternMatchingRecoveryIncompleteOrPatterns.py
|
06needhamt/intellij-community
|
63d7b8030e4fdefeb4760e511e289f7e6b3a5c5b
|
[
"Apache-2.0"
] | null | null | null |
match x:
    case 1 |:
        pass
    case (1 |):
        pass
| 12.8
| 15
| 0.375
| 8
| 64
| 3
| 0.625
| 0.416667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0.515625
| 64
| 5
| 16
| 12.8
| 0.709677
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.4
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
874c13a44b109db00b9bf7790b85ee8b8c504a14
| 1,385
|
py
|
Python
|
test/espnet2/asr/specaug/test_specaug.py
|
Hertin/espnet
|
a0f2175df08b4750a9f0305c20b8c11f6e941867
|
[
"Apache-2.0"
] | 5,053
|
2017-12-13T06:21:41.000Z
|
2022-03-31T13:38:29.000Z
|
test/espnet2/asr/specaug/test_specaug.py
|
Hertin/espnet
|
a0f2175df08b4750a9f0305c20b8c11f6e941867
|
[
"Apache-2.0"
] | 3,666
|
2017-12-14T05:58:50.000Z
|
2022-03-31T22:11:49.000Z
|
test/espnet2/asr/specaug/test_specaug.py
|
Hertin/espnet
|
a0f2175df08b4750a9f0305c20b8c11f6e941867
|
[
"Apache-2.0"
] | 1,709
|
2017-12-13T01:02:42.000Z
|
2022-03-31T11:57:45.000Z
|
import pytest
import torch

from espnet2.asr.specaug.specaug import SpecAug


@pytest.mark.parametrize("apply_time_warp", [False, True])
@pytest.mark.parametrize("apply_freq_mask", [False, True])
@pytest.mark.parametrize("apply_time_mask", [False, True])
def test_SpecAuc(apply_time_warp, apply_freq_mask, apply_time_mask):
    if not apply_time_warp and not apply_time_mask and not apply_freq_mask:
        with pytest.raises(ValueError):
            specaug = SpecAug(
                apply_time_warp=apply_time_warp,
                apply_freq_mask=apply_freq_mask,
                apply_time_mask=apply_time_mask,
            )
    else:
        specaug = SpecAug(
            apply_time_warp=apply_time_warp,
            apply_freq_mask=apply_freq_mask,
            apply_time_mask=apply_time_mask,
        )
        x = torch.randn(2, 1000, 80)
        specaug(x)


@pytest.mark.parametrize("apply_time_warp", [False, True])
@pytest.mark.parametrize("apply_freq_mask", [False, True])
@pytest.mark.parametrize("apply_time_mask", [False, True])
def test_SpecAuc_repr(apply_time_warp, apply_freq_mask, apply_time_mask):
    if not apply_time_warp and not apply_time_mask and not apply_freq_mask:
        return
    specaug = SpecAug(
        apply_time_warp=apply_time_warp,
        apply_freq_mask=apply_freq_mask,
        apply_time_mask=apply_time_mask,
    )
    print(specaug)
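The three stacked parametrize decorators expand each test into 2**3 = 8 parameter combinations, exactly one of which (all three flags False) must raise ValueError. A quick standard-library check of that expansion:

import itertools

combos = list(itertools.product([False, True], repeat=3))
assert len(combos) == 8                          # pytest generates one test per combo
assert combos.count((False, False, False)) == 1  # the single all-False case raises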
| 34.625
| 75
| 0.699639
| 190
| 1,385
| 4.705263
| 0.178947
| 0.241611
| 0.174497
| 0.161074
| 0.841163
| 0.841163
| 0.841163
| 0.841163
| 0.841163
| 0.841163
| 0
| 0.007326
| 0.211552
| 1,385
| 39
| 76
| 35.512821
| 0.811355
| 0
| 0
| 0.588235
| 0
| 0
| 0.064982
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.088235
| 0
| 0.176471
| 0.029412
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8764162ce9ffbac8c518bad4f3f9e8f219f7300b
| 2,948
|
py
|
Python
|
January16th/test_assignment.py
|
EricCharnesky/CIS2001-Winter2020
|
e51d967e97399248dc8b69aaed2d5ca8aee0cd6e
|
[
"MIT"
] | 3
|
2020-01-06T23:21:36.000Z
|
2021-03-01T08:36:57.000Z
|
January16th/test_assignment.py
|
EricCharnesky/CIS2001-Winter2020
|
e51d967e97399248dc8b69aaed2d5ca8aee0cd6e
|
[
"MIT"
] | null | null | null |
January16th/test_assignment.py
|
EricCharnesky/CIS2001-Winter2020
|
e51d967e97399248dc8b69aaed2d5ca8aee0cd6e
|
[
"MIT"
] | 2
|
2020-01-21T16:00:03.000Z
|
2020-05-05T14:57:34.000Z
|
from unittest import TestCase

from January16th import Assignment, AdjustedAssignment


class TestAssignment(TestCase):

    def test_get_average_score(self):
        # AAA
        # arrange
        score1 = 10
        score2 = 20
        score3 = 30
        expected_average_score = 20
        assignment = Assignment()
        assignment.add_score(score1)
        assignment.add_score(score2)
        assignment.add_score(score3)
        # act
        actual_average_score = assignment.get_average_score()
        # assert
        self.assertEqual(expected_average_score, actual_average_score)

    def test_get_min_score(self):
        # AAA
        # arrange
        score1 = 10
        score2 = 20
        score3 = 30
        expected_min_score = 10
        assignment = Assignment()
        assignment.add_score(score1)
        assignment.add_score(score2)
        assignment.add_score(score3)
        # act
        actual_min_score = assignment.get_min_score()
        # assert
        self.assertEqual(expected_min_score, actual_min_score)

    def test_get_max_score(self):
        # AAA
        # arrange
        score1 = 10
        score2 = 20
        score3 = 30
        expected_max_score = 30
        assignment = Assignment()
        assignment.add_score(score1)
        assignment.add_score(score2)
        assignment.add_score(score3)
        # act
        actual_max_score = assignment.get_max_score()
        # assert
        self.assertEqual(expected_max_score, actual_max_score)

    def test_get_adjusted_average_score(self):
        # AAA
        # arrange
        score1 = 10
        score2 = 20
        score3 = 30
        expected_average_score = 90
        assignment = AdjustedAssignment()
        assignment.add_score(score1)
        assignment.add_score(score2)
        assignment.add_score(score3)
        # act
        actual_average_score = assignment.get_average_score()
        # assert
        self.assertEqual(expected_average_score, actual_average_score)

    def test_get_adjusted_min_score(self):
        # AAA
        # arrange
        score1 = 10
        score2 = 20
        score3 = 30
        expected_min_score = 80
        assignment = AdjustedAssignment()
        assignment.add_score(score1)
        assignment.add_score(score2)
        assignment.add_score(score3)
        # act
        actual_min_score = assignment.get_min_score()
        # assert
        self.assertEqual(expected_min_score, actual_min_score)

    def test_get_adjusted_max_score(self):
        # AAA
        # arrange
        score1 = 10
        score2 = 20
        score3 = 30
        expected_max_score = 100
        assignment = AdjustedAssignment()
        assignment.add_score(score1)
        assignment.add_score(score2)
        assignment.add_score(score3)
        # act
        actual_max_score = assignment.get_max_score()
        # assert
        self.assertEqual(expected_max_score, actual_max_score)
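Taken together, the tests pin down the January16th module's contract: Assignment reports the raw average/min/max of the added scores, while AdjustedAssignment shifts each statistic up by 70 (scores 10/20/30 become average 90, min 80, max 100). One implementation consistent with these tests is sketched below; the actual course module may differ:

class Assignment:
    def __init__(self):
        self._scores = []

    def add_score(self, score):
        self._scores.append(score)

    def get_average_score(self):
        return sum(self._scores) / len(self._scores)

    def get_min_score(self):
        return min(self._scores)

    def get_max_score(self):
        return max(self._scores)


class AdjustedAssignment(Assignment):
    ADJUSTMENT = 70  # inferred from the expected values in the tests above

    def get_average_score(self):
        return super().get_average_score() + self.ADJUSTMENT

    def get_min_score(self):
        return super().get_min_score() + self.ADJUSTMENT

    def get_max_score(self):
        return super().get_max_score() + self.ADJUSTMENT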
| 24.773109
| 70
| 0.620081
| 309
| 2,948
| 5.595469
| 0.113269
| 0.135338
| 0.187392
| 0.065934
| 0.927704
| 0.908039
| 0.908039
| 0.908039
| 0.908039
| 0.908039
| 0
| 0.043112
| 0.315468
| 2,948
| 118
| 71
| 24.983051
| 0.813677
| 0.046472
| 0
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 1
| 0.086957
| false
| 0
| 0.028986
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5eb07f632cc384f1216855705075020038325002
| 120
|
py
|
Python
|
docs/en/docs_src/get_me/get_me_package.py
|
AliRn76/rubika-bot
|
203da2e585f03d6b2cef96cbd7a68b471e010db7
|
[
"MIT"
] | 1
|
2022-03-30T10:33:33.000Z
|
2022-03-30T10:33:33.000Z
|
docs/fa/docs_src/get_me/get_me_package.py
|
AliRn76/rubika-bot
|
203da2e585f03d6b2cef96cbd7a68b471e010db7
|
[
"MIT"
] | null | null | null |
docs/fa/docs_src/get_me/get_me_package.py
|
AliRn76/rubika-bot
|
203da2e585f03d6b2cef96cbd7a68b471e010db7
|
[
"MIT"
] | null | null | null |
from rubika_bot.requests import get_me
from rubika_bot.models import Bot
bot: Bot = get_me(token='SUPER_SECRET_TOKEN')
| 24
| 45
| 0.816667
| 21
| 120
| 4.380952
| 0.52381
| 0.217391
| 0.282609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108333
| 120
| 4
| 46
| 30
| 0.859813
| 0
| 0
| 0
| 0
| 0
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5ed74c8be50250a9ddedce6534b4c67a0b6da011
| 879,306
|
py
|
Python
|
python/M2M_Toolbox.py
|
crisien/OOI_M2M_Toolbox
|
8e83b641fb164488ddc3166bdb3eb51251113e7b
|
[
"MIT"
] | null | null | null |
python/M2M_Toolbox.py
|
crisien/OOI_M2M_Toolbox
|
8e83b641fb164488ddc3166bdb3eb51251113e7b
|
[
"MIT"
] | null | null | null |
python/M2M_Toolbox.py
|
crisien/OOI_M2M_Toolbox
|
8e83b641fb164488ddc3166bdb3eb51251113e7b
|
[
"MIT"
] | 1
|
2020-04-07T21:32:28.000Z
|
2020-04-07T21:32:28.000Z
|
import numpy as np
import os
import re
import requests
import sys
import time
from netCDF4 import Dataset
import pandas as pd
from bs4 import BeautifulSoup
from tqdm import tqdm
# setup constants used to access the data from the different M2M interfaces
BASE_URL = 'https://ooinet.oceanobservatories.org/api/m2m/' # base M2M URL
SENSOR_URL = '12576/sensor/inv/' # Sensor Information
# setup access credentials
AUTH = ['OOIAPI-853A3LA6QI3L62', 'WYAN89W5X4Z0QZ']
def M2M_Call(uframe_dataset_name, start_date, end_date):
options = '?beginDT=' + start_date + '&endDT=' + end_date + '&format=application/netcdf'
r = requests.get(BASE_URL + SENSOR_URL + uframe_dataset_name + options, auth=(AUTH[0], AUTH[1]))
if r.status_code == requests.codes.ok:
data = r.json()
else:
return None
# wait until the request is completed
print('Waiting for OOINet to process and prepare data request, this may take up to 20 minutes')
url = [url for url in data['allURLs'] if re.match(r'.*async_results.*', url)][0]
check_complete = url + '/status.txt'
with tqdm(total=400, desc='Waiting') as bar:
for i in range(400):
r = requests.get(check_complete)
bar.update(1)
elapsed = (i * 3) / 60  # minutes spent polling so far; computed before it is printed
if r.status_code == requests.codes.ok:
bar.n = 400
bar.last_print_n = 400
bar.refresh()
print('\nrequest completed in %f minutes.' % elapsed)
break
else:
time.sleep(3)
return data
def M2M_Files(data, tag=''):
"""
Use a regex tag combined with the results of the M2M data request to collect the data from the THREDDS catalog.
Collected data is gathered into an xarray dataset for further processing.
:param data: JSON object returned from M2M data request with details on where the data is to be found for download
:param tag: regex tag to use in discriminating the data files, so we only collect the correct ones
:return: the collected data as an xarray dataset
"""
# Create a list of the files from the request above using a simple regex as a tag to discriminate the files
url = [url for url in data['allURLs'] if re.match(r'.*thredds.*', url)][0]
files = list_files(url, tag)
return files
def list_files(url, tag=''):
"""
Function to create a list of the NetCDF data files in the THREDDS catalog created by a request to the M2M system.
:param url: URL to user's THREDDS catalog specific to a data request
:param tag: regex pattern used to distinguish files of interest
:return: list of files in the catalog with the URL path set relative to the catalog
"""
page = requests.get(url).text
soup = BeautifulSoup(page, 'html.parser')
pattern = re.compile(tag)
return [node.get('href') for node in soup.find_all('a', text=pattern)]
def M2M_Data(nclist,variables):
thredds = 'https://opendap.oceanobservatories.org/thredds/dodsC/ooi/'
# nclist may eventually contain more than one URL
for jj in range(len(nclist)):
url=nclist[jj]
url=url[25:]
dap_url = thredds + url + '#fillmismatch'
openFile = Dataset(dap_url,'r')
for ii in range(len(variables)):
dum = openFile.variables[variables[ii].name]
variables[ii].data = np.append(variables[ii].data, dum[:].data)
tmp = variables[0].data/60/60/24
time_converted = pd.to_datetime(tmp, unit='D', origin=pd.Timestamp('1900-01-01'))
return variables, time_converted
class var(object):
def __init__(self):
"""A Class that generically holds data with a variable name
and the units as attributes"""
self.name = ''
self.data = np.array([])
self.units = ''
def __repr__(self):
return_str = "name: " + self.name + '\n'
return_str += "units: " + self.units + '\n'
return_str += "data: size: " + str(self.data.shape)
return return_str
class structtype(object):
def __init__(self):
""" A class that imitates a Matlab structure type
"""
self._data = []
def __getitem__(self, index):
"""implement index behavior in the struct"""
if index == len(self._data):
self._data.append(var())
return self._data[index]
def __len__(self):
return len(self._data)
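# --- Illustrative end-to-end usage (editor's sketch, not part of the original
# toolbox). The date range and file tag below are hypothetical; the dataset
# name is one of the reference designators used in M2M_URLs further down.
def _example_m2m_flow():
    request = M2M_Call('CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument',
                       '2019-01-01T00:00:00.000Z', '2019-02-01T00:00:00.000Z')
    if request is None:  # HTTP error from OOINet
        return None
    nclist = M2M_Files(request, tag='.*METBK.*\\.nc$')
    var_list = structtype()
    var_list[0].name = 'time'  # structtype grows a var() entry on first access
    return M2M_Data(nclist, [var_list[0]])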
def M2M_URLs(platform_name,node,instrument_class,method):
var_list = structtype()
#MOPAK
if platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
#FDCHP
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
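    # The CE09OSPM wire-following profiler carries the fast-response DOFST-K oxygen
    # sensor rather than a DOSTA optode; dofst_k_oxygen is the raw sensor frequency
    # output, hence the Hz unit below.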
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
#ADCP
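# Acoustic Doppler current profilers: per-bin depths, instrument attitude
# (heading/pitch/roll in deci-degrees), and earth-referenced velocity components.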
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#ZPLSC
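# Bio-acoustic sonar: note that only the time coordinate is requested for these
# datasets in this lookup.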
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#WAVSS
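# Surface wave statistics: bulk wave height, period, and direction parameters
# from the buoy-mounted wave sensor.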
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
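# Single-point velocity meters: east/north/up velocities plus attitude,
# temperature, and pressure reported in raw deci-degree, 0.01degC, and
# 0.001dbar units.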
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W
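# Water-side pCO2: thermistor temperature and seawater pCO2 in uatm.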
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN
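# Seawater pH: thermistor temperature and the computed (unitless) pH value.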
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR
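# Spectral irradiance: multichannel downwelling irradiance vector in uW cm-2 nm-1.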
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF
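# Seafloor pressure: absolute pressure and temperature from the tide-measurement
# streams on the MFN frames.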
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP
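# Pumped CTDs: temperature, practical salinity, density, pressure, and
# conductivity records.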
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D
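# 3-D single-point velocity meters on the seafloor MFN frames: turbulent
# velocity components plus pressure (0.001dbar).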
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#VEL3DK
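# VEL3DK: profiler-mounted variant of the 3-D velocity meter on the
# CE09OSPM wire-following profiler; adds heading/pitch/roll and the
# co-located CTD pressure to the velocity components.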
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#PCO2A
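# PCO2A: air-sea pCO2 system on the surface buoys; seawater and
# atmospheric CO2 partial pressures plus the derived air-sea CO2 flux.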
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#PARAD
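# PARAD: photosynthetically available radiation sensor on the
# wire-following profiler, paired with the co-located CTD pressure.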
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
#OPTAA
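# OPTAA: optical absorption/attenuation spectrophotometer. Only the time
# base is requested here; the multi-wavelength spectra do not fit this
# scalar var_list pattern and are presumably handled elsewhere.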
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR
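# NUTNR: nitrate sensor (SUNA) on the NSIF frames. Note that the
# telemetered branches point at the 'suna_dcl_recovered' stream --
# apparently the stream name as served by uFrame, not a typo.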
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
#MOPAK
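# MOPAK: 3-axis motion package on the surface buoys; only the time base
# is requested from the recovered-host accelerometer stream.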
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
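# METBK1: bulk meteorology package on the surface buoys -- sea surface
# temperature/conductivity/salinity, corrected wind components, pressure,
# humidity, long/shortwave irradiance, precipitation, minute-averaged
# bulk heat-flux products, surface currents, and specific humidity.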
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
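# FLORT: three-wavelength fluorometer; chlorophyll-a, CDOM, and optical
# backscatter / volume scattering from the recovered-host sample stream.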
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
#FDCHP
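# FDCHP: direct-covariance flux package (CE02SHSM buoy only); only the
# time base is requested from the recovered-host stream.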
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/recovered_host/fdchp_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
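# DOSTA: dissolved oxygen optode. NSIF branches request the full optode
# record; MFN branches request only the derived dissolved_oxygen product
# and the optode's own oxygen estimate.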
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
#ADCP
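# ADCP: acoustic Doppler current profiler, earth-coordinate velocity
# stream; bin_depths records the depth of each velocity bin alongside
# attitude and the three velocity components.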
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#WAVSS
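# WAVSS: surface wave statistics from the buoy-mounted wave sensor --
# zero-crossing counts, wave heights and periods, mean direction, and
# directional spread per measurement burst.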
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
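# VELPT: single-point velocity meter on buoy and NSIF frames; attitude,
# temperature, and pressure are kept in the raw engineering scalings
# (deci-degrees, 0.01 degC, 0.001 dbar).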
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
# alternate dcl stream, kept commented out for reference:
#uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W - seawater pCO2 (thermistor temperature and pCO2), RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN - seawater pH (thermistor temperature and pH), RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR - downwelling spectral irradiance, RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF - seafloor pressure and temperature (tide measurements), RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP - moored CTD (temperature, salinity, density, pressure, conductivity), RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D - 3-D single-point turbulent velocity and pressure, RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#PCO2A - air-sea pCO2 (surface seawater and atmosphere) and CO2 flux, RecoveredHost streams
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#OPTAA - optical absorption and attenuation; only the timestamp is mapped for these streams
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR - nitrate concentration (raw and salinity-corrected), RecoveredHost streams
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
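#CTDBP - moored CTD, RecoveredInst streams; note the instrument-recovered parser uses
# ctdbp_seawater_* variable names, unlike the RecoveredHost streams above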
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
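#CTDPF - wire-following profiler CTD, RecoveredWFP stream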
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
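#ADCP - velocity profiles in earth coordinates (bin depths, attitude, and
# eastward/northward/upward seawater velocity), RecoveredInst streams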
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
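#MFN (seafloor Multi-Function Node) ADCP Recovered Instrument Data below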
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
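#MFN ZPLSC (bio-acoustic sonar) Recovered Instrument Data below; only the time record is mapped for the echogram streams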
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
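#Buoy VELPT (single-point velocity meter) Recovered Instrument Data below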
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
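#NSIF VELPT Recovered Instrument Data below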
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
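#VEL3D (3-D single-point velocity) Data below: wire-following profiler first, then the seafloor MFN nodes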
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
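#MFN PRESF (seafloor pressure/tide) Recovered Instrument Data below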
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
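#PHSEN (seawater pH) Recovered Instrument Data below: NSIF nodes first, then MFN nodes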
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
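#PCO2W (seawater pCO2) Recovered Instrument Data below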
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
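#PARAD (photosynthetically available radiation) Recovered Wire-Following Profiler Data below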
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
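#NSIF NUTNR (nitrate) Recovered Instrument Data below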
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
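#FDCHP (direct covariance flux) Recovered Instrument Data below; only the time record is mapped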
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/recovered_inst/fdchp_a_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
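#FLORT (fluorometer/optical backscatter) Recovered Data below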
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/recovered_inst/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/recovered_inst/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
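#DOSTA (dissolved oxygen) Recovered Data below: wire-following profiler first, then NSIF and MFN nodes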
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
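#Wave statistics Data below, derived from the MFN ADCPT-M log9 records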
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_inst/adcpt_m_instrument_log9_recovered'
var_list[0].name = 'time'
var_list[1].name = 'significant_wave_height'
var_list[2].name = 'peak_wave_period'
var_list[3].name = 'peak_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'seconds'
var_list[3].units = 'degrees'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_inst/adcpt_m_instrument_log9_recovered'
var_list[0].name = 'time'
var_list[1].name = 'significant_wave_height'
var_list[2].name = 'peak_wave_period'
var_list[3].name = 'peak_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'seconds'
var_list[3].units = 'degrees'
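#Cabled BEP (Benthic Experiment Package) Streamed Data below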
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/06-CTDBPN106/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_no_seawater_pressure'
var_list[5].name = 'ctdbp_no_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/06-CTDBPO108/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_no_seawater_pressure'
var_list[5].name = 'ctdbp_no_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
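#Note: streamed BEP dissolved oxygen is served from the co-located CTDBP-NO sample stream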
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/06-CTDBPN106/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/06-CTDBPO108/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/10-PHSEND103/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/10-PHSEND107/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/09-PCO2WB103/streamed/pco2w_b_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/09-PCO2WB104/streamed/pco2w_b_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'ADCP' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/05-ADCPTB104/streamed/adcp_velocity_beam'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'ADCP' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/05-ADCPSI103/streamed/adcp_velocity_beam'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'VEL3D' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/07-VEL3DC108/streamed/vel3d_cd_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'VEL3D' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/07-VEL3DC107/streamed/vel3d_cd_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'OPTAA' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/08-OPTAAD106/streamed/optaa_sample'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'OPTAA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/08-OPTAAC104/streamed/optaa_sample'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#CSPP (Coastal Surface-Piercing Profiler) Data below
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/08-FLORTJ000/telemetered/flort_dj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/08-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/08-FLORTJ000/telemetered/flort_dj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/08-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/02-DOSTAJ000/telemetered/dosta_abcdjm_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/02-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/02-DOSTAJ000/telemetered/dosta_abcdjm_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/02-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/09-CTDPFJ000/telemetered/ctdpf_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/09-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/09-CTDPFJ000/telemetered/ctdpf_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/09-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/10-PARADJ000/telemetered/parad_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/10-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/10-PARADJ000/telemetered/parad_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/10-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/06-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/06-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/07-SPKIRJ000/telemetered/spkir_abj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/07-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/07-SPKIRJ000/telemetered/spkir_abj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/07-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/05-VELPTJ000/telemetered/velpt_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/05-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/05-VELPTJ000/telemetered/velpt_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/05-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/07-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/07-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/01-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/01-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/08-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/08-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/09-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/09-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/05-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/05-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/06-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/06-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/02-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/02-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
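# Glider (CE05MOAS mobile assets) data below; each glider branch also
# carries lat/lon (degree_north/degree_east) so profiles can be georeferenced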
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
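# Glider dissolved-oxygen (DOSTA) branches below; same per-glider
# Telemetered/RecoveredHost pairing as the CTD branches above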
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
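# ----------------------------------------------------------------------
# FLORT: fluorometer/optical-backscatter datasets (flort_m_sample) for
# the same set of CE05MOAS gliders. The sci_flbbcd_* variable names
# suggest a FLBBCD-style triplet sensor: chlorophyll (ug/L), CDOM (ppb),
# and backscatter (m-1 sr-1), plus derived scattering coefficients
# (m-1), interpolated CTD pressure, and position.
# ----------------------------------------------------------------------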
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
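# ----------------------------------------------------------------------
# PARAD: photosynthetically available radiation (PAR) datasets. Every
# branch returns parad_m_par (umol photons m-2 s-1) together with the
# interpolated CTD pressure and the glider position.
# ----------------------------------------------------------------------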
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
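# Glider-mounted ADCP (acoustic Doppler current profiler) branches for the
# CE05MOAS gliders, RecoveredHost only ('adcp_velocity_glider' stream). Each
# returns binned seawater velocities (eastward/northward/upward) plus vehicle
# attitude (heading, pitch, roll), bin depths, CTD pressure, and position.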
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
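# METBK bulk-meteorology hourly flux products ('metbk_hourly' stream) on the
# surface-mooring buoys (SBD11/06-METBKA000). The 17 variables are the
# hourly derived air-sea fluxes (heat, momentum, freshwater, radiation)
# plus 2 m / 10 m reference values. Note the time variable for this stream
# is 'met_timeflx', not 'time'.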
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
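# WAVSS surface-wave spectra, mean-directional products (SBD12/05-WAVSSA000).
# Telemetered branches use the 'wavss_a_dcl_mean_directional' stream;
# RecoveredHost branches use the same stream with a '_recovered' suffix.
# Variables include the directional PSD and, per the 'corrected' parameter
# names, wave directions corrected for magnetic declination.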
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
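# WAVSS non-directional wave spectra: a reduced six-variable set (time, band
# count, initial frequency, frequency spacing, non-directional PSD, and the
# derived frequency array).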
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
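# WAVSS buoy-motion products: raw heave/north/east displacement arrays plus
# the 'wavss_a_magcor_buoymotion_*' series, which the 'magcor' prefix
# suggests are magnetically corrected buoy displacements on their own
# time base ('wavss_a_buoymotion_time').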
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
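# Editor's note (assumed WAVSS convention; verify against the instrument
# documentation): fourier_coefficient_2d_array is expected to hold the
# normalized directional Fourier coefficients (a1, b1, a2, b2) for each
# directional band, with the matching frequency axis reconstructable as
#   f_i = initial_directional_frequency + i * directional_frequency_spacing
# for i = 0 .. number_directional_bands - 1.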
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2A-CTDPFA107/streamed/ctdpf_sbe43_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'seawater_pressure'
var_list[5].name = 'seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/01-CTDPFL105/recovered_inst/dpc_ctd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'dpc_ctd_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/01-CTDPFL105/recovered_wfp/dpc_ctd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'dpc_ctd_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2A-CTDPFA107/streamed/ctdpf_sbe43_sample'
var_list[0].name = 'time'
var_list[1].name = 'corrected_dissolved_oxygen'
var_list[2].name = 'seawater_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'dbar'
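# Note: on this profiler the oxygen product is served by the SBE43 plumbed
# into the CTD, so this DOSTA branch reuses the same ctdpf_sbe43_sample
# stream as the CTD branch above.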
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/06-DOSTAD105/recovered_inst/dpc_optode_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/06-DOSTAD105/recovered_wfp/dpc_optode_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3A-FLORTD104/streamed/flort_d_data_record'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/04-FLNTUA103/recovered_inst/dpc_flnturtd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'flntu_x_mmp_cds_fluorometric_chlorophyll_a'
var_list[2].name = 'flntu_x_mmp_cds_total_volume_scattering_coefficient'
var_list[3].name = 'flntu_x_mmp_cds_bback_total'
var_list[4].name = 'flcdr_x_mmp_cds_fluorometric_cdom'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'ug/L'
var_list[2].units = 'm-1 sr-1'
var_list[3].units = 'm-1'
var_list[4].units = 'ppb'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/03-FLCDRA103/recovered_wfp/dpc_flcdrtd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'flntu_x_mmp_cds_fluorometric_chlorophyll_a'
var_list[2].name = 'flntu_x_mmp_cds_total_volume_scattering_coefficient'
var_list[3].name = 'flntu_x_mmp_cds_bback_total'
var_list[4].name = 'flcdr_x_mmp_cds_fluorometric_cdom'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'ug/L'
var_list[2].units = 'm-1 sr-1'
var_list[3].units = 'm-1'
var_list[4].units = 'ppb'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2B-PHSENA108/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3C-PARADA102/streamed/parad_sa_sample'
var_list[0].name = 'time'
var_list[1].name = 'par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3D-SPKIRA102/streamed/spkir_data_record'
var_list[0].name = 'time'
var_list[1].name = 'spkir_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4A-NUTNRA102/streamed/nutnr_a_sample'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4F-PCO2WA102/streamed/pco2w_a_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4B-VELPTD106/streamed/velpt_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'velpt_d_eastward_velocity'
var_list[2].name = 'velpt_d_northward_velocity'
var_list[3].name = 'velpt_d_upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[9].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
var_list[9].units = 'dbar'
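# Editor's sketch (scaling implied by the units strings above): the raw
# VELPT attitude fields are integer-scaled, e.g.
#   heading_deg = var_list[4].data / 10.0   # deci-degrees -> degrees
#   temp_degC   = var_list[7].data / 100.0  # 0.01 degC    -> degC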
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/02-VEL3DA105/recovered_inst/dpc_acm_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_a_eastward_velocity'
var_list[2].name = 'vel3d_a_northward_velocity'
var_list[3].name = 'vel3d_a_upward_velocity_ascending'
var_list[4].name = 'vel3d_a_upward_velocity_descending'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'm/s'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/02-VEL3DA105/recovered_wfp/dpc_acm_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_a_eastward_velocity'
var_list[2].name = 'vel3d_a_northward_velocity'
var_list[3].name = 'vel3d_a_upward_velocity_ascending'
var_list[4].name = 'vel3d_a_upward_velocity_descending'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'm/s'
var_list[5].units = 'dbar'
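# Editor's note (inferred from the parameter names; verify against the ACM
# documentation): the deep-profiler current meter reports separate upward-
# velocity estimates for ascending and descending profiles, so a downstream
# merge would select var_list[3] or var_list[4] per profile according to
# the profiler's direction of travel.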
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4A-CTDPFA109/streamed/ctdpf_optode_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'seawater_pressure'
var_list[5].name = 'seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'DOSTA' and method == 'Streamed':
# Dissolved oxygen on this platform is served through the co-located CTD
# stream, so request 4A-CTDPFA109 rather than the optode's own reference:
#uframe_dataset_name = 'CE04OSPS/PC01B/4A-DOSTAD109/streamed/ctdpf_optode_sample'
uframe_dataset_name = 'CE04OSPS/PC01B/4A-CTDPFA109/streamed/ctdpf_optode_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'seawater_pressure'  # also use this for the '4A-DOSTAD109/streamed/ctdpf_optode_sample' stream
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4B-PHSENA106/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4D-PCO2WA105/streamed/pco2w_a_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
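# Editor's sketch (hypothetical downstream usage, not part of this mapping):
# once uframe_dataset_name is resolved and the request is fulfilled, the
# empty .data arrays are typically filled from the returned netCDF file.
# Assuming `ds` is an xarray.Dataset opened from that file:
#   for v in var_list:
#       if v.name and v.name in ds:
#           v.data = np.asarray(ds[v.name].values)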
# Coastal Pioneer CSM Data Streams
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = 'percent'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
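# Editor's sketch (standard vector arithmetic on the components above):
# scalar wind speed and the compass bearing the wind blows toward follow
# from the magnetically corrected components, e.g.
#   u = var_list[4].data                       # east component, m/s
#   v = var_list[5].data                       # north component, m/s
#   wind_speed      = np.sqrt(u**2 + v**2)     # m/s
#   wind_toward_deg = np.degrees(np.arctan2(u, v)) % 360.0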
# WAVSS
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
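# Editor's note (standard wave-statistics relation, stated for reference):
# wave_height_hmo is the spectral significant wave height, Hm0 = 4*sqrt(m0)
# with m0 the zeroth moment of the heave spectrum, and is generally close
# to (but not identical to) the zero-crossing significant_wave_height.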
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
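# Editor's sketch (follows from the band parameters above): the frequency
# axis for psd_mean_directional, and for the non-directional spectra in the
# branches below, reconstructs as
#   f_i = initial_frequency + i * frequency_spacing,  i = 0 .. number_bands-1
# which appears to be what the wavss_a_*_frequency products carry.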
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
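# Editor's sketch (follows from the burst parameters above): relative sample
# times within each motion burst reconstruct as
#   t_i = initial_time + i * time_spacing,  i = 0 .. number_time_samples-1
# while wavss_a_buoymotion_time carries the corresponding absolute times.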
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
#PCO2A
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#PCO2A (RecoveredHost)
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#FDCHP
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/recovered_inst/fdchp_a_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/recovered_host/fdchp_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
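#METBK-hourly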
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
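#CTD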
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD37/03-CTDBPD000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD37/03-CTDBPD000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD37/03-CTDBPD000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD37/03-CTDBPD000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD37/03-CTDBPD000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD37/03-CTDBPD000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
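#OPTAA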
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
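#VELPT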
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
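#FLORT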
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
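#SPKIR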
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
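#DOSTA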
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
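#NSIF seawater pH (PHSEN) streams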
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
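#MFN (seafloor multi-function node) seawater pH (PHSEN) streams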
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
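#MFN seawater pCO2 (PCO2W) streams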
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
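#MFN seafloor pressure / tide (PRESF) streams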
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD35/02-PRESFB000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD35/02-PRESFB000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD35/02-PRESFB000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD35/02-PRESFB000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD35/02-PRESFB000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD35/02-PRESFB000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD35/02-PRESFC000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD35/02-PRESFC000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD35/02-PRESFC000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
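#MFN single-point velocity meter (VELPT) streams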
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD35/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD35/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD35/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD35/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD35/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD35/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD35/04-VELPTB000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD35/04-VELPTB000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD35/04-VELPTB000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
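#MFN dissolved oxygen (DOSTA) streams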
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD37/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD37/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD37/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD37/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
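#MFN bioacoustic sonar (ZPLSC) streams (time coordinate only)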
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
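#MFN upward-looking ADCP earth-referenced velocity streams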
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD35/01-ADCPTF000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD35/01-ADCPTF000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD35/01-ADCPTF000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD35/01-ADCPTF000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD35/01-ADCPSJ000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD35/01-ADCPSJ000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
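#A minimal usage sketch (hypothetical helper name), assuming this elif chain
#lives inside a function that returns (uframe_dataset_name, var_list) for a
#given platform/node/instrument/method combination:
#    name, variables = get_dataset('CP01CNSM', 'MFN', 'ADCP', 'Telemetered')
#    for v in variables:
#        print(v.name, v.units)  # e.g. 'eastward_seawater_velocity', 'm/s'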
#Coastal Pioneer Wire-Following Profilers (WFP)
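#Buoy accelerometer (MOPAK) streams (time coordinate only)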
elif platform_name == 'CP04OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/SBS11/02-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSPM/SBS11/02-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
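#WFP fluorometer (FLORT) streams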
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP04OSPM/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
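#WFP dissolved oxygen: the DOSTA instrument class maps to the DOFST-K sensor on the profiler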
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP04OSPM/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
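#WFP 3-D single-point velocity meter (VEL3D-K) streams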
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP04OSPM/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
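#WFP CTD (CTDPF-CKL) streams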
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP04OSPM/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
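#WFP photosynthetically active radiation (PARAD) streams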
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP04OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP04OSPM/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
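#CP01CNPM Wire-Following Profiler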
elif platform_name == 'CP01CNPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP01CNPM/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP01CNPM/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP01CNPM/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP01CNPM/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP01CNPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP01CNPM/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
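        # CP02PMCI (Pioneer Central Inshore Profiler Mooring, assuming
        # standard OOI naming): same MOPAK/WFP01 instrument set as CP01CNPM.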
elif platform_name == 'CP02PMCI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMCI/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCI/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCI/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCI/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCI/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP02PMCI' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCI/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
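        # CP02PMCO (Pioneer Central Offshore Profiler Mooring): same
        # MOPAK/WFP01 instrument set as the moorings above.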
elif platform_name == 'CP02PMCO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMCO/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCO/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCO/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCO/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCO/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP02PMCO' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMCO/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
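        # CP02PMUI (Pioneer Upstream Inshore Profiler Mooring): same
        # MOPAK/WFP01 instrument set.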
elif platform_name == 'CP02PMUI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMUI/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUI/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUI/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUI/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUI/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP02PMUI' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUI/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
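        # CP02PMUO (Pioneer Upstream Offshore Profiler Mooring): same
        # MOPAK/WFP01 instrument set.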
elif platform_name == 'CP02PMUO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMUO/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUO/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUO/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUO/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUO/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP02PMUO' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP02PMUO/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
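        # CP03ISPM (Pioneer Inshore Profiler Mooring, assuming standard OOI
        # naming): same MOPAK/WFP01 instrument set.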
elif platform_name == 'CP03ISPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP03ISPM/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP03ISPM/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP03ISPM/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP03ISPM/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CP03ISPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CP03ISPM/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
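        # -------------------------------------------------------------------
        # Riser-mounted ADCPs (RII01/02-ADCPSL010 on CP04OSPM, 02-ADCPTG010
        # elsewhere) for the profiler moorings. RecoveredInst branches map to
        # the adcp_velocity_earth stream (generic heading/pitch/roll names,
        # 'deci-degrees'); RecoveredHost and Telemetered branches map to the
        # adcps_jln_stc_instrument streams (adcps_jln_* names, 'cdegree',
        # *_velocity2 parameters).
        # Illustrative resolution of one request through this chain:
        #   platform_name='CP04OSPM', node='RISER', instrument_class='ADCP',
        #   method='RecoveredInst'
        #   -> 'CP04OSPM/RII01/02-ADCPSL010/recovered_inst/adcp_velocity_earth'
        # -------------------------------------------------------------------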
elif platform_name == 'CP04OSPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSPM/RII01/02-ADCPSL010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP04OSPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSPM/RII01/02-ADCPSL010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP04OSPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/RII01/02-ADCPSL010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
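        # CP01CNPM riser ADCP (02-ADCPTG010): same three-method pattern.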
elif platform_name == 'CP01CNPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNPM/RII01/02-ADCPTG010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP01CNPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNPM/RII01/02-ADCPTG010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP01CNPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/RII01/02-ADCPTG010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
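        # CP02PMCI riser ADCP (02-ADCPTG010): same three-method pattern.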
elif platform_name == 'CP02PMCI' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP02PMCI/RII01/02-ADCPTG010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMCI' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMCI/RII01/02-ADCPTG010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMCI' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/RII01/02-ADCPTG010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
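        # CP02PMCO riser ADCP (02-ADCPTG010): same three-method pattern.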
elif platform_name == 'CP02PMCO' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP02PMCO/RII01/02-ADCPTG010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMCO' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMCO/RII01/02-ADCPTG010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMCO' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/RII01/02-ADCPTG010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
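# --- CP02PMUI (Pioneer Upstream Inshore Profiler Mooring) riser ADCP ---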
elif platform_name == 'CP02PMUI' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP02PMUI/RII01/02-ADCPTG010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMUI' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMUI/RII01/02-ADCPTG010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMUI' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/RII01/02-ADCPTG010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
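# --- CP02PMUO (Pioneer Upstream Offshore Profiler Mooring) riser ADCP ---
# Note the different sensor series in the reference designator here
# (02-ADCPSL010, versus 02-ADCPTG010 on the other profiler moorings).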
elif platform_name == 'CP02PMUO' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP02PMUO/RII01/02-ADCPSL010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMUO' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMUO/RII01/02-ADCPSL010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP02PMUO' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/RII01/02-ADCPSL010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
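# --- CP03ISPM (Pioneer Inshore Profiler Mooring) riser ADCP ---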
elif platform_name == 'CP03ISPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISPM/RII01/02-ADCPTG010/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP03ISPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISPM/RII01/02-ADCPTG010/recovered_host/adcps_jln_stc_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CP03ISPM' and node == 'RISER' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/RII01/02-ADCPTG010/telemetered/adcps_jln_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'adcps_jln_heading'
var_list[3].name = 'adcps_jln_pitch'
var_list[4].name = 'adcps_jln_roll'
var_list[5].name = 'adcps_jln_eastward_seawater_velocity2'
var_list[6].name = 'adcps_jln_northward_seawater_velocity2'
var_list[7].name = 'adcps_jln_upward_seawater_velocity2'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'cdegree'
var_list[3].units = 'cdegree'
var_list[4].units = 'cdegree'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
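#
# --- CP05MOAS mobile assets (coastal gliders) ---
# The short platform names used below (CPGL336, CPGL388, CPGL335,
# CPGL339, CPGL340, CPGL374) map onto CP05MOAS/GLxxx reference
# designators. uFrame dataset names follow the pattern
#   <array>/<platform>/<port-instrument>/<delivery_method>/<stream>
# e.g. 'CP05MOAS/GL336/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'.
# Each glider carries the same instrument classes: CTD, DOSTA, FLORT,
# PARAD, and ADCP (the glider ADCP is catalogued here for RecoveredHost
# only). A minimal usage sketch, assuming a hypothetical wrapper name
# for the enclosing lookup (its real signature is defined earlier in
# this module):
#   lookup('CPGL336', 'GLIDER', 'CTD', 'Telemetered')
#   # -> uframe_dataset_name plus a var_list of empty, unit-tagged arrays
#
# CPGL336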
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL336/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL336/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL336/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL336/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL336/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL336/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL336/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL336/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL336' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL336/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
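# CPGL388 (same instrument/stream layout as CPGL336 above)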
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL388/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL388/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL388/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL388/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL388/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL388/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL388/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL388/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL388' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL388/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
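# CPGL335 (same layout as CPGL336 above)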
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL335/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL335/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL335/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL335/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL335/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL335/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL335/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL335/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL335' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL335/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
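# CPGL339 (same layout as CPGL336 above)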
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL339/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL339/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL339/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL339/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL339/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL339/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL339/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL339/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL339' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL339/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
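# CPGL340 (same layout as CPGL336 above)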
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL340/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL340/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL340/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL340/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL340/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL340/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL340/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL340/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL340' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL340/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
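# CPGL374 (same layout as CPGL336 above)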
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL374/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL374/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL374/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL374/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL374/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL374/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL374/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL374/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL374' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL374/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
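# --- CP05MOAS glider GL375 ('CPGL375'): CTD/DOSTA/FLORT/PARAD (telemetered
# and recovered_host) plus recovered_host ADCP ---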
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL375/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL375/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL375/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL375/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL375/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL375/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL375/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL375/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL375' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL375/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
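# --- CP05MOAS glider GL376 ('CPGL376'): CTD/DOSTA/FLORT/PARAD (telemetered
# and recovered_host) plus recovered_host ADCP ---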
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL376/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL376/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL376/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL376/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL376/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL376/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL376/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL376/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL376' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL376/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
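# --- CP05MOAS glider GL379 ('CPGL379'): CTD/DOSTA/FLORT/PARAD (telemetered
# and recovered_host) plus recovered_host ADCP ---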
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL379/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL379/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL379/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL379/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL379/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL379/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL379/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL379/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL379' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL379/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
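# --- CP05MOAS glider GL380 ('CPGL380'): CTD/DOSTA/FLORT/PARAD (telemetered
# and recovered_host) plus recovered_host ADCP ---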
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL380/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL380/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL380/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL380/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL380/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL380/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL380/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL380/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL380' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL380/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
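# --- CP05MOAS glider GL387 ('CPGL387'): CTD/DOSTA/FLORT/PARAD (telemetered
# and recovered_host) plus recovered_host ADCP ---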
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL387/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL387/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL387/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL387/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL387/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL387/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL387/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL387/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL387' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL387/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
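# --- CP05MOAS glider GL389 ('CPGL389'): CTD/DOSTA/FLORT/PARAD (telemetered
# and recovered_host) plus recovered_host ADCP ---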
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL389/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL389/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL389/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL389/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL389/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL389/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL389/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL389/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL389' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL389/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
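# --- CP05MOAS glider GL514 ('CPGL514'): same per-instrument branch pattern
# as the gliders above ---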
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL514/03-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL514/03-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL514/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL514/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL514/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL514/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CP05MOAS/GL514/05-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL514/05-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CPGL514' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP05MOAS/GL514/01-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCI/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMCI/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMCO/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMCO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMCO/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUI/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUI' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMUI/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP02PMUO/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP02PMUO' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP02PMUO/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
else:
        print('Illegal platform_name, node, instrument_class, method, or combination thereof.')
    return uframe_dataset_name, var_list
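For context, here is a minimal, self-contained sketch of how the (uframe_dataset_name, var_list) pair assembled above might be consumed downstream. The _Var class and summarize helper are illustrative assumptions, not part of the original module; they only mirror the name/data/units record shape populated by the branches above.

import numpy as np

class _Var:
    """Illustrative stand-in for the records held in var_list."""
    def __init__(self, name='', units=''):
        self.name = name
        self.data = np.array([])  # filled later by the actual data request
        self.units = units

def summarize(var_list):
    """Map each named variable to its units, skipping unused slots."""
    return {v.name: v.units for v in var_list if v.name}

ctd_vars = [_Var('time', 'seconds since 1900-01-01'),
            _Var('sci_water_temp', 'degC')]
print(summarize(ctd_vars))
# -> {'time': 'seconds since 1900-01-01', 'sci_water_temp': 'degC'}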
# ---------------------------------------------------------------------------
# Next record in the dump:
#   File:    alerter/test/monitors/managers/test_github.py (Python, 81,928 bytes)
#   Blob:    0d6d383fc839b1893db48eacde5ca62abeb3441e
#   Repo:    SimplyVC/panic @ 2f5c327ea0d14b6a49dc8f4599a255048bc2ff6d
#   License: Apache-2.0
#   Stars:   41 (2019-08-23 to 2022-03-28)
#   Issues:  147 (2019-08-30 to 2022-03-30)
#   Forks:   3 (2019-09-03 to 2021-08-18)
# ---------------------------------------------------------------------------
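# Note on the testing pattern below: a sketch (the names mirror the fixtures
# created in setUp; nothing here is added to the module itself) of how these
# tests drive GitHubMonitorsManager._process_configs directly, hand-crafting
# pika delivery metadata instead of publishing through a live broker:
#
#     method = pika.spec.Basic.Deliver(routing_key=chains_routing_key)
#     body = json.dumps(sent_configs_example_chain)
#     manager._process_configs(blocking_channel, method,
#                              pika.spec.BasicProperties(), body)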
import copy
import json
import logging
import multiprocessing
import time
import unittest
from datetime import timedelta, datetime
from multiprocessing import Process
from unittest import mock
import pika
import pika.exceptions
from freezegun import freeze_time
from src.configs.repo import RepoConfig
from src.message_broker.rabbitmq import RabbitMQApi
from src.monitors.managers.github import GitHubMonitorsManager
from src.monitors.starters import start_github_monitor
from src.utils import env
from src.utils.constants.names import GITHUB_MONITOR_NAME_TEMPLATE
from src.utils.constants.rabbitmq import (GH_MON_MAN_CONFIGS_QUEUE_NAME,
GH_MON_MAN_CONFIGS_ROUTING_KEY_GEN,
GH_MON_MAN_HEARTBEAT_QUEUE_NAME,
GH_MON_MAN_CONFIGS_ROUTING_KEY_CHAINS,
HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY,
HEALTH_CHECK_EXCHANGE,
CONFIG_EXCHANGE, PING_ROUTING_KEY,
TOPIC)
from src.utils.exceptions import PANICException
from src.utils.types import str_to_bool
from test.utils.utils import infinite_fn
class TestGitHubMonitorsManager(unittest.TestCase):
def setUp(self) -> None:
self.dummy_logger = logging.getLogger('Dummy')
self.dummy_logger.disabled = True
self.connection_check_time_interval = timedelta(seconds=0)
self.rabbit_ip = env.RABBIT_IP
self.rabbitmq = RabbitMQApi(
self.dummy_logger, self.rabbit_ip,
connection_check_time_interval=self.connection_check_time_interval)
self.manager_name = 'test_github_monitors_manager'
self.test_queue_name = 'Test Queue'
self.test_data_str = 'test data'
self.test_heartbeat = {
'component_name': 'Test Component',
'is_alive': True,
'timestamp': datetime(2012, 1, 1).timestamp(),
}
self.dummy_process1 = Process(target=infinite_fn, args=())
self.dummy_process1.daemon = True
self.dummy_process2 = Process(target=infinite_fn, args=())
self.dummy_process2.daemon = True
self.dummy_process3 = Process(target=infinite_fn, args=())
self.dummy_process3.daemon = True
self.config_process_dict_example = {
'config_id1': {
'component_name': GITHUB_MONITOR_NAME_TEMPLATE.format('repo_1'),
'process': self.dummy_process1,
'chain': 'Substrate Polkadot'
},
'config_id2': {
'component_name': GITHUB_MONITOR_NAME_TEMPLATE.format('repo_2'),
'process': self.dummy_process2,
'chain': 'general'
},
}
self.github_repos_configs_example = {
'Substrate Polkadot': {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'repo_1',
'monitor_repo': "True",
}
},
'general': {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'repo_2',
'monitor_repo': "True",
}
},
}
self.sent_configs_example_chain = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'repo_1',
'monitor_repo': "True",
}
}
self.sent_configs_example_general = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'repo_2',
'monitor_repo': "True",
}
}
self.repo_id_new = 'config_id3'
self.parent_id_new = 'chain_1'
self.repo_name_new = 'repo_3/'
self.monitor_repo_new = True
self.chain_example_new = 'Substrate Polkadot'
self.releases_page_new = \
env.GITHUB_RELEASES_TEMPLATE.format(self.repo_name_new)
self.repo_config_example = RepoConfig(self.repo_id_new,
self.parent_id_new,
self.repo_name_new,
self.monitor_repo_new,
self.releases_page_new)
self.test_manager = GitHubMonitorsManager(
self.dummy_logger, self.manager_name, self.rabbitmq)
self.chains_routing_key = \
'chains.Substrate.Polkadot.github_repos_config'
self.general_routing_key = GH_MON_MAN_CONFIGS_ROUTING_KEY_GEN
self.test_exception = PANICException('test_exception', 1)
def tearDown(self) -> None:
# Delete any queues and exchanges which are common across many tests
try:
self.test_manager.rabbitmq.connect()
            # Declare them beforehand just in case there are tests which do
            # not use these queues and exchanges
self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.test_manager.rabbitmq.queue_declare(
GH_MON_MAN_HEARTBEAT_QUEUE_NAME, False, True, False, False)
self.test_manager.rabbitmq.queue_declare(
GH_MON_MAN_CONFIGS_QUEUE_NAME, False, True, False, False)
self.test_manager.rabbitmq.exchange_declare(
CONFIG_EXCHANGE, TOPIC, False, True, False, False)
self.test_manager.rabbitmq.exchange_declare(
HEALTH_CHECK_EXCHANGE, TOPIC, False, True, False, False)
self.test_manager.rabbitmq.queue_purge(self.test_queue_name)
self.test_manager.rabbitmq.queue_purge(
GH_MON_MAN_HEARTBEAT_QUEUE_NAME)
self.test_manager.rabbitmq.queue_purge(
GH_MON_MAN_CONFIGS_QUEUE_NAME)
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
self.test_manager.rabbitmq.queue_delete(
GH_MON_MAN_HEARTBEAT_QUEUE_NAME)
self.test_manager.rabbitmq.queue_delete(
GH_MON_MAN_CONFIGS_QUEUE_NAME)
self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
self.test_manager.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
self.test_manager.rabbitmq.disconnect()
except Exception as e:
print("Deletion of queues and exchanges failed: {}".format(e))
self.dummy_logger = None
self.rabbitmq = None
self.config_process_dict_example = None
self.github_repos_configs_example = None
self.repo_config_example = None
self.test_manager = None
self.test_exception = None
self.dummy_process1 = None
self.dummy_process2 = None
self.dummy_process3 = None
def test_str_returns_manager_name(self) -> None:
self.assertEqual(self.manager_name, str(self.test_manager))
def test_config_process_dict_returns_config_process_dict(self) -> None:
self.test_manager._config_process_dict = \
self.config_process_dict_example
self.assertEqual(self.config_process_dict_example,
self.test_manager.config_process_dict)
def test_name_returns_manager_name(self) -> None:
self.assertEqual(self.manager_name, self.test_manager.name)
def test_github_repos_configs_returns_github_repos_configs(self) -> None:
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.assertEqual(self.github_repos_configs_example,
self.test_manager.github_repos_configs)
@mock.patch.object(RabbitMQApi, "start_consuming")
def test_listen_for_data_calls_start_consuming(
self, mock_start_consuming) -> None:
mock_start_consuming.return_value = None
self.test_manager._listen_for_data()
mock_start_consuming.assert_called_once()
def test_initialise_rabbitmq_initialises_everything_as_expected(
self) -> None:
try:
# To make sure that there is no connection/channel already
# established
self.assertIsNone(self.rabbitmq.connection)
self.assertIsNone(self.rabbitmq.channel)
# To make sure that the exchanges and queues have not already been
# declared
self.rabbitmq.connect()
self.test_manager.rabbitmq.queue_delete(
GH_MON_MAN_HEARTBEAT_QUEUE_NAME)
self.test_manager.rabbitmq.queue_delete(
GH_MON_MAN_CONFIGS_QUEUE_NAME)
self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
self.test_manager.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
self.rabbitmq.disconnect()
self.test_manager._initialise_rabbitmq()
# Perform checks that the connection has been opened, marked as open
# and that the delivery confirmation variable is set.
self.assertTrue(self.test_manager.rabbitmq.is_connected)
self.assertTrue(self.test_manager.rabbitmq.connection.is_open)
self.assertTrue(
self.test_manager.rabbitmq.channel._delivery_confirmation)
            # Check whether the exchanges and queues have been created by
            # sending messages with the same routing keys as for the queues. We
            # will also check if the size of the queues is 0 to confirm that
            # basic_consume was called (it will store the msg in the component
            # memory immediately). If one of the exchanges or queues is not
            # created, then an exception will be thrown. Note that when
            # deleting the exchanges in the beginning we also released every
            # binding, hence there is no other queue bound with the same
            # routing key to any exchange at this point.
self.test_manager.rabbitmq.basic_publish_confirm(
exchange=HEALTH_CHECK_EXCHANGE,
routing_key=PING_ROUTING_KEY, body=self.test_data_str,
is_body_dict=False,
properties=pika.BasicProperties(delivery_mode=2),
mandatory=True)
self.test_manager.rabbitmq.basic_publish_confirm(
exchange=CONFIG_EXCHANGE,
routing_key=GH_MON_MAN_CONFIGS_ROUTING_KEY_CHAINS,
body=self.test_data_str, is_body_dict=False,
properties=pika.BasicProperties(delivery_mode=2),
mandatory=True)
self.test_manager.rabbitmq.basic_publish_confirm(
exchange=CONFIG_EXCHANGE,
routing_key=GH_MON_MAN_CONFIGS_ROUTING_KEY_GEN,
body=self.test_data_str, is_body_dict=False,
properties=pika.BasicProperties(delivery_mode=2),
mandatory=True)
# Re-declare queue to get the number of messages
res = self.test_manager.rabbitmq.queue_declare(
GH_MON_MAN_HEARTBEAT_QUEUE_NAME, False, True, False, False)
self.assertEqual(0, res.method.message_count)
res = self.test_manager.rabbitmq.queue_declare(
GH_MON_MAN_CONFIGS_QUEUE_NAME, False, True, False, False)
self.assertEqual(0, res.method.message_count)
except Exception as e:
self.fail("Test failed: {}".format(e))
def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
# This test creates a queue which receives messages with the same
# routing key as the ones sent by send_heartbeat, and checks that the
# heartbeat is received
try:
self.test_manager._initialise_rabbitmq()
            # Delete the queue beforehand to avoid lingering messages in the
            # queue on error.
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.assertEqual(0, res.method.message_count)
self.test_manager.rabbitmq.queue_bind(
queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
self.test_manager._send_heartbeat(self.test_heartbeat)
            # By re-declaring the queue we can get the number of messages
            # in the queue.
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=True
)
self.assertEqual(1, res.method.message_count)
# Check that the message received is actually the HB
_, _, body = self.test_manager.rabbitmq.basic_get(
self.test_queue_name)
self.assertEqual(self.test_heartbeat, json.loads(body))
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(multiprocessing.Process, "start")
@mock.patch.object(multiprocessing, 'Process')
def test_create_and_start_monitor_process_stores_the_correct_process_info(
self, mock_init, mock_start) -> None:
mock_start.return_value = None
mock_init.return_value = self.dummy_process3
self.test_manager._config_process_dict = \
self.config_process_dict_example
expected_output = {
'config_id1': {
'component_name': GITHUB_MONITOR_NAME_TEMPLATE.format('repo_1'),
'process': self.dummy_process1,
'chain': 'Substrate Polkadot'
},
'config_id2': {
'component_name': GITHUB_MONITOR_NAME_TEMPLATE.format('repo_2'),
'process': self.dummy_process2,
'chain': 'general'
},
self.repo_id_new: {}
}
new_entry = expected_output[self.repo_id_new]
new_entry['component_name'] = GITHUB_MONITOR_NAME_TEMPLATE.format(
self.repo_name_new.replace('/', ' ')[:-1])
new_entry['chain'] = self.chain_example_new
new_entry['process'] = self.dummy_process3
self.test_manager._create_and_start_monitor_process(
self.repo_config_example, self.repo_id_new, self.chain_example_new)
self.assertEqual(
expected_output, self.test_manager.config_process_dict)
@mock.patch.object(multiprocessing.Process, "start")
def test_create_and_start_monitor_process_creates_the_correct_process(
self, mock_start) -> None:
mock_start.return_value = None
self.test_manager._create_and_start_monitor_process(
self.repo_config_example, self.repo_id_new, self.chain_example_new)
new_entry = self.test_manager.config_process_dict[self.repo_id_new]
new_entry_process = new_entry['process']
self.assertTrue(new_entry_process.daemon)
self.assertEqual(1, len(new_entry_process._args))
self.assertEqual(self.repo_config_example, new_entry_process._args[0])
self.assertEqual(start_github_monitor, new_entry_process._target)
@mock.patch("src.monitors.starters.create_logger")
def test_create_and_start_monitor_process_starts_the_process(
self, mock_create_logger) -> None:
mock_create_logger.return_value = self.dummy_logger
self.test_manager._create_and_start_monitor_process(
self.repo_config_example, self.repo_id_new, self.chain_example_new)
# We need to sleep to give some time for the monitor to be initialised,
# otherwise the process would not terminate
time.sleep(1)
new_entry = self.test_manager.config_process_dict[self.repo_id_new]
new_entry_process = new_entry['process']
self.assertTrue(new_entry_process.is_alive())
new_entry_process.terminate()
new_entry_process.join()
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_process_configs_ignores_default_key(self, mock_ack) -> None:
# This test will pass if the stored repos config does not change.
# This would mean that the DEFAULT key was ignored, otherwise, it would
# have been included as a new config.
mock_ack.return_value = None
old_github_repos_configs = copy.deepcopy(
self.github_repos_configs_example)
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
# We will pass the acceptable schema as a value to make sure that the
# default key will never be added. By passing the schema we will also
# prevent processing errors from happening.
self.sent_configs_example_chain['DEFAULT'] = {
'id': 'default_id1',
'parent_id': 'chain_1',
'repo_name': 'default_repo_1',
'monitor_repo': "True",
}
self.sent_configs_example_general['DEFAULT'] = {
'id': 'default_id2',
'parent_id': 'GENERAL',
'repo_name': 'default_repo_2',
'monitor_repo': "True",
}
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain = json.dumps(self.sent_configs_example_chain)
body_general = json.dumps(self.sent_configs_example_general)
properties = pika.spec.BasicProperties()
# We will send the message twice with both general and chain
# routing keys to make sure that the DEFAULT key is ignored in both
# cases
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general)
self.assertEqual(old_github_repos_configs,
self.test_manager.github_repos_configs)
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain)
self.assertEqual(old_github_repos_configs,
self.test_manager.github_repos_configs)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
@mock.patch.object(GitHubMonitorsManager,
"_create_and_start_monitor_process")
def test_process_configs_stores_new_configs_to_be_monitored_correctly(
self, startup_mock, mock_ack) -> None:
# We will check whether new configs are added to the state. Since some
# new configs have `monitor_repo = False` we are also testing that
# new configs are ignored if they should not be monitored.
mock_ack.return_value = None
startup_mock.return_value = None
new_configs_chain = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'repo_1',
'monitor_repo': "True",
},
'config_id3': {
'id': 'config_id3',
'parent_id': 'chain_1',
'repo_name': 'repo_3',
'monitor_repo': "True",
},
'config_id4': {
'id': 'config_id4',
'parent_id': 'chain_1',
'repo_name': 'repo_4',
'monitor_repo': "False",
}
}
new_configs_general = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'repo_2',
'monitor_repo': "True",
},
'config_id5': {
'id': 'config_id5',
'parent_id': 'GENERAL',
'repo_name': 'repo_5',
'monitor_repo': "True",
},
'config_id6': {
'id': 'config_id6',
'parent_id': 'GENERAL',
'repo_name': 'repo_6',
'monitor_repo': "False",
}
}
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
# We will send new configs through both the existing and
# non-existing chain and general paths to make sure that all routes
# work as expected.
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(
self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
body_new_configs_chain = json.dumps(new_configs_chain)
body_new_configs_general = json.dumps(new_configs_general)
properties = pika.spec.BasicProperties()
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
expected_output = copy.deepcopy(self.github_repos_configs_example)
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.test_manager._process_configs(blocking_channel, method_chains,
properties,
body_new_configs_chain)
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_new_configs_general)
expected_output['Substrate Polkadot']['config_id3'] = \
new_configs_chain['config_id3']
expected_output['general']['config_id5'] = \
new_configs_general['config_id5']
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
@mock.patch.object(GitHubMonitorsManager,
"_create_and_start_monitor_process")
@mock.patch.object(multiprocessing.Process, "terminate")
@mock.patch.object(multiprocessing.Process, "join")
def test_process_configs_stores_modified_configs_to_be_monitored_correctly(
self, join_mock, terminate_mock, startup_mock, mock_ack) -> None:
# In this test we will check that modified configurations with
# `monitor_repo = True` are stored correctly in the state. Some
# configurations will have `monitor_repo = False` to check whether the
# monitor associated with the previous configuration is terminated.
mock_ack.return_value = None
startup_mock.return_value = None
join_mock.return_value = None
terminate_mock.return_value = None
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
new_configs_chain_monitor_true = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'new_repo_name_chain',
'monitor_repo': "True",
},
}
new_configs_chain_monitor_false = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'new_repo_name_chain',
'monitor_repo': "False",
},
}
new_configs_general_monitor_true = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'new_repo_name_general',
'monitor_repo': "True",
},
}
new_configs_general_monitor_false = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'new_repo_name_general',
'monitor_repo': "false",
},
}
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_mon_true = json.dumps(new_configs_chain_monitor_true)
body_general_mon_true = json.dumps(
new_configs_general_monitor_true)
body_chain_mon_false = json.dumps(new_configs_chain_monitor_false)
body_general_mon_false = json.dumps(
new_configs_general_monitor_false)
properties = pika.spec.BasicProperties()
expected_output = copy.deepcopy(self.github_repos_configs_example)
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_mon_true)
expected_output['Substrate Polkadot']['config_id1'] = \
new_configs_chain_monitor_true['config_id1']
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_general_mon_true)
expected_output['general']['config_id2'] = \
new_configs_general_monitor_true['config_id2']
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.test_manager._process_configs(blocking_channel, method_chains,
properties,
body_chain_mon_false)
expected_output['Substrate Polkadot'] = {}
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.assertTrue(
'config_id1' not in self.test_manager.config_process_dict)
self.test_manager._process_configs(
blocking_channel, method_general, properties,
body_general_mon_false)
expected_output['general'] = {}
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.assertTrue(
'config_id2' not in self.test_manager.config_process_dict)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
@mock.patch.object(multiprocessing.Process, "terminate")
@mock.patch.object(multiprocessing.Process, "join")
def test_process_configs_removes_deleted_configs_from_state_correctly(
self, join_mock, terminate_mock, mock_ack) -> None:
# In this test we will check that removed configurations are actually
# removed from the state
mock_ack.return_value = None
join_mock.return_value = None
terminate_mock.return_value = None
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
new_configs_chain = {}
new_configs_general = {}
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain = json.dumps(new_configs_chain)
body_general = json.dumps(new_configs_general)
properties = pika.spec.BasicProperties()
expected_output = copy.deepcopy(self.github_repos_configs_example)
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain)
expected_output['Substrate Polkadot'] = {}
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.assertTrue(
'config_id1' not in self.test_manager.config_process_dict)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general)
expected_output['general'] = {}
self.assertEqual(
expected_output, self.test_manager.github_repos_configs)
self.assertTrue(
'config_id2' not in self.test_manager.config_process_dict)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
@mock.patch.object(GitHubMonitorsManager,
"_create_and_start_monitor_process")
def test_proc_configs_starts_new_monitors_for_new_configs_to_be_monitored(
self, startup_mock, mock_ack) -> None:
# We will check whether _create_and_start_monitor_process is called
# correctly on each newly added configuration if
# `monitor_repo = True`. Implicitly we will be also testing that if
# `monitor_repo = False` no new monitor is created.
mock_ack.return_value = None
startup_mock.return_value = None
new_configs_chain = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'repo_1',
'monitor_repo': "True",
},
'config_id3': {
'id': 'config_id3',
'parent_id': 'chain_1',
'repo_name': 'repo_3',
'monitor_repo': "True",
},
'config_id4': {
'id': 'config_id4',
'parent_id': 'chain_1',
'repo_name': 'repo_4',
'monitor_repo': "False",
}
}
new_configs_general = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'repo_2',
'monitor_repo': "True",
},
'config_id5': {
'id': 'config_id5',
'parent_id': 'GENERAL',
'repo_name': 'repo_5',
'monitor_repo': "True",
},
'config_id6': {
'id': 'config_id6',
'parent_id': 'GENERAL',
'repo_name': 'repo_6',
'monitor_repo': "False",
}
}
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
# We will send new configs through both the existing and
# non-existing chain and general paths to make sure that all routes
# work as expected.
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(
self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
body_new_configs_chain = json.dumps(new_configs_chain)
body_new_configs_general = json.dumps(new_configs_general)
properties = pika.spec.BasicProperties()
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.assertEqual(1, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id1' in args)
            self.assertTrue('Substrate Polkadot' in args)
self.assertEqual(
self.sent_configs_example_chain['config_id1']['id'],
args[0].repo_id)
self.assertEqual(
self.sent_configs_example_chain['config_id1']['parent_id'],
args[0].parent_id)
self.assertEqual(self.sent_configs_example_chain['config_id1'][
'repo_name'] + '/', args[0].repo_name)
self.assertEqual(
str_to_bool(
self.sent_configs_example_chain['config_id1'][
'monitor_repo']), args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
self.sent_configs_example_chain['config_id1']['repo_name']
+ '/'), args[0].releases_page)
self.test_manager._process_configs(blocking_channel, method_chains,
properties,
body_new_configs_chain)
self.assertEqual(2, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id3' in args)
            self.assertTrue('Substrate Polkadot' in args)
self.assertEqual(new_configs_chain['config_id3']['id'],
args[0].repo_id)
self.assertEqual(new_configs_chain['config_id3']['parent_id'],
args[0].parent_id)
self.assertEqual(new_configs_chain['config_id3']['repo_name'] + '/',
args[0].repo_name)
self.assertEqual(
str_to_bool(new_configs_chain['config_id3']['monitor_repo']),
args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
new_configs_chain['config_id3']['repo_name'] + '/'),
args[0].releases_page)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
self.assertEqual(3, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id2' in args)
            self.assertTrue('general' in args)
self.assertEqual(
self.sent_configs_example_general['config_id2']['id'],
args[0].repo_id)
self.assertEqual(
self.sent_configs_example_general['config_id2']['parent_id'],
args[0].parent_id)
self.assertEqual(self.sent_configs_example_general['config_id2'][
'repo_name'] + '/', args[0].repo_name)
self.assertEqual(
str_to_bool(
self.sent_configs_example_general['config_id2'][
'monitor_repo']), args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
self.sent_configs_example_general['config_id2']['repo_name']
+ '/'), args[0].releases_page)
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_new_configs_general)
self.assertEqual(4, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id5' in args)
            self.assertTrue('general' in args)
self.assertEqual(new_configs_general['config_id5']['id'],
args[0].repo_id)
self.assertEqual(new_configs_general['config_id5']['parent_id'],
args[0].parent_id)
self.assertEqual(new_configs_general['config_id5'][
'repo_name'] + '/', args[0].repo_name)
self.assertEqual(
str_to_bool(new_configs_general['config_id5']['monitor_repo']),
args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
new_configs_general['config_id5']['repo_name'] + '/'),
args[0].releases_page)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch("src.monitors.starters.create_logger")
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_proc_confs_term_and_starts_monitors_for_modified_confs_to_be_mon(
self, mock_ack, mock_create_logger) -> None:
# In this test we will check that modified configurations with
# `monitor_repo = True` will have new monitors started. Implicitly
# we will be checking that modified configs with
# `monitor_repo = False` will only have their previous processes
# terminated.
mock_ack.return_value = None
mock_create_logger.return_value = self.dummy_logger
new_configs_chain_monitor_true = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'new_repo_name_chain',
'monitor_repo': "True",
},
}
new_configs_chain_monitor_false = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'new_repo_name_chain',
'monitor_repo': "False",
},
}
new_configs_general_monitor_true = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'new_repo_name_general',
'monitor_repo': "True",
},
}
new_configs_general_monitor_false = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'new_repo_name_general',
'monitor_repo': "false",
},
}
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
body_chain_mon_true = json.dumps(new_configs_chain_monitor_true)
body_general_mon_true = json.dumps(
new_configs_general_monitor_true)
body_chain_mon_false = json.dumps(new_configs_chain_monitor_false)
body_general_mon_false = json.dumps(
new_configs_general_monitor_false)
properties = pika.spec.BasicProperties()
# First send the new configs as the state is empty
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
# Give some time till the processes start
time.sleep(1)
            # Ensure that the processes have been started
self.assertTrue(self.test_manager.config_process_dict[
'config_id1']['process'].is_alive())
self.assertTrue(self.test_manager.config_process_dict[
'config_id2']['process'].is_alive())
# Send the updated configs with `monitor_repo = True`
conf_id1_old_proc = self.test_manager.config_process_dict[
'config_id1']['process']
conf_id2_old_proc = self.test_manager.config_process_dict[
'config_id2']['process']
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_mon_true)
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_general_mon_true)
# Give some time till the processes restart
time.sleep(1)
            # Check that the old processes have terminated and new ones have
            # started.
self.assertFalse(conf_id1_old_proc.is_alive())
self.assertTrue(self.test_manager.config_process_dict[
'config_id1']['process'].is_alive())
self.assertFalse(conf_id2_old_proc.is_alive())
self.assertTrue(self.test_manager.config_process_dict[
'config_id2']['process'].is_alive())
# Send the updated configs with `monitor_repo = False`
conf_id1_old_proc = self.test_manager.config_process_dict[
'config_id1']['process']
conf_id2_old_proc = self.test_manager.config_process_dict[
'config_id2']['process']
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_mon_false)
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_general_mon_false)
            # Give some time till the processes stop
time.sleep(1)
            # Check that the old processes have terminated and that new ones
            # have not been started. Note, if _create_and_start_monitor_process
            # is called then the config ids would be in config_process_dict
self.assertFalse(conf_id1_old_proc.is_alive())
self.assertFalse(
'config_id1' in self.test_manager.config_process_dict)
self.assertFalse(conf_id2_old_proc.is_alive())
self.assertFalse(
'config_id2' in self.test_manager.config_process_dict)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
@mock.patch.object(GitHubMonitorsManager,
"_create_and_start_monitor_process")
@mock.patch.object(multiprocessing.Process, "join")
@mock.patch.object(multiprocessing.Process, "terminate")
def test_process_confs_restarts_an_updated_monitor_with_the_correct_conf(
self, mock_terminate, mock_join, startup_mock, mock_ack) -> None:
# We will check whether _create_and_start_monitor_process is called
# correctly on an updated configuration.
mock_ack.return_value = None
startup_mock.return_value = None
mock_terminate.return_value = None
mock_join.return_value = None
updated_configs_chain = {
'config_id1': {
'id': 'config_id1',
'parent_id': 'chain_1',
'repo_name': 'changed_repo_name_chain',
'monitor_repo': "True",
},
}
updated_configs_general = {
'config_id2': {
'id': 'config_id2',
'parent_id': 'GENERAL',
'repo_name': 'changed_repo_name_gen',
'monitor_repo': "True",
},
}
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
# We will send new configs through both the existing and
# non-existing chain and general paths to make sure that all routes
# work as expected.
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_updated_configs_chain = json.dumps(updated_configs_chain)
body_updated_configs_general = json.dumps(updated_configs_general)
properties = pika.spec.BasicProperties()
self.test_manager._process_configs(blocking_channel, method_chains,
properties,
body_updated_configs_chain)
self.assertEqual(1, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id1' in args)
            self.assertTrue('Substrate Polkadot' in args)
self.assertEqual(updated_configs_chain['config_id1']['id'],
args[0].repo_id)
self.assertEqual(updated_configs_chain['config_id1']['parent_id'],
args[0].parent_id)
self.assertEqual(
updated_configs_chain['config_id1']['repo_name'] + '/',
args[0].repo_name)
self.assertEqual(
str_to_bool(
updated_configs_chain['config_id1']['monitor_repo']),
args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
updated_configs_chain['config_id1']['repo_name'] + '/'),
args[0].releases_page)
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_updated_configs_general)
self.assertEqual(2, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id2' in args)
            self.assertTrue('general' in args)
self.assertEqual(updated_configs_general['config_id2']['id'],
args[0].repo_id)
self.assertEqual(updated_configs_general['config_id2']['parent_id'],
args[0].parent_id)
self.assertEqual(
updated_configs_general['config_id2']['repo_name'] + '/',
args[0].repo_name)
self.assertEqual(
str_to_bool(
updated_configs_general['config_id2']['monitor_repo']),
args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
updated_configs_general['config_id2']['repo_name'] + '/'),
args[0].releases_page)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch("src.monitors.starters.create_logger")
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_process_configs_terminates_monitors_for_removed_configs(
self, mock_ack, mock_create_logger) -> None:
        # In this test we will check that when a config is removed, its
        # monitor is terminated by _process_configs.
mock_ack.return_value = None
mock_create_logger.return_value = self.dummy_logger
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
body_chain_new = json.dumps({})
body_general_new = json.dumps({})
properties = pika.spec.BasicProperties()
# First send the new configs as the state is empty
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
# Give time for the monitors to start
time.sleep(1)
            # Ensure that the processes have been started
self.assertTrue(self.test_manager.config_process_dict[
'config_id1']['process'].is_alive())
self.assertTrue(self.test_manager.config_process_dict[
'config_id2']['process'].is_alive())
# Send the updated configs
conf_id1_old_proc = self.test_manager.config_process_dict[
'config_id1']['process']
conf_id2_old_proc = self.test_manager.config_process_dict[
'config_id2']['process']
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_new)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_new)
# Give time for the monitors to stop
time.sleep(1)
            # Check that the old processes have terminated
self.assertFalse(conf_id1_old_proc.is_alive())
self.assertFalse(conf_id2_old_proc.is_alive())
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_process_configs_ignores_new_configs_with_missing_keys(
self, mock_ack) -> None:
# We will check whether the state is kept intact if new configurations
# with missing keys are sent. Exceptions should never be raised in this
# case, and basic_ack must be called to ignore the message.
mock_ack.return_value = None
new_configs_chain = {
'config_id3': {
'id': 'config_id3',
'parentfg_id': 'chain_1',
'repo_namfge': 'repo_3',
'monitorfg_repo': "True",
},
}
new_configs_general = {
'config_id5': {
'id': 'config_id5',
'parentdfg_id': 'GENERAL',
'repo_namdfge': 'repo_5',
'monitor_repostdfg': "True",
},
}
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
# We will send new configs through both the existing and
# non-existing chain and general paths to make sure that all routes
# work as expected.
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_new_configs_chain = json.dumps(new_configs_chain)
body_new_configs_general = json.dumps(new_configs_general)
properties = pika.spec.BasicProperties()
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_new_configs_general)
self.assertEqual(1, mock_ack.call_count)
self.assertEqual(self.config_process_dict_example,
self.test_manager.config_process_dict)
self.assertEqual(self.github_repos_configs_example,
self.test_manager.github_repos_configs)
self.test_manager._process_configs(blocking_channel, method_chains,
properties,
body_new_configs_chain)
self.assertEqual(2, mock_ack.call_count)
self.assertEqual(self.config_process_dict_example,
self.test_manager.config_process_dict)
self.assertEqual(self.github_repos_configs_example,
self.test_manager.github_repos_configs)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_configs_ignores_modified_configs_with_missing_keys(
self, mock_ack) -> None:
# We will check whether the state is kept intact if modified
# configurations with missing keys are sent. Exceptions should never be
# raised in this case, and basic_ack must be called to ignore the
# message.
mock_ack.return_value = None
updated_configs_chain = {
'config_id1': {
'id': 'config_id1',
'parentfg_id': 'chain_1',
'repo_namfge': 'repo_1',
'monitorfg_repo': "True",
},
}
updated_configs_general = {
'config_id2': {
'id': 'config_id2',
'parentdfg_id': 'GENERAL',
'repo_namdfge': 'repo_2',
'monitor_repo': "True",
},
}
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
try:
# Must create a connection so that the blocking channel is passed
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
# We will send new configs through both the existing and
# non-existing chain and general paths to make sure that all routes
# work as expected.
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_updated_configs_chain = json.dumps(updated_configs_chain)
body_updated_configs_general = json.dumps(updated_configs_general)
properties = pika.spec.BasicProperties()
self.test_manager._process_configs(blocking_channel, method_general,
properties,
body_updated_configs_general)
self.assertEqual(1, mock_ack.call_count)
self.assertEqual(self.config_process_dict_example,
self.test_manager.config_process_dict)
self.assertEqual(self.github_repos_configs_example,
self.test_manager.github_repos_configs)
self.test_manager._process_configs(blocking_channel, method_chains,
properties,
body_updated_configs_chain)
self.assertEqual(2, mock_ack.call_count)
self.assertEqual(self.config_process_dict_example,
self.test_manager.config_process_dict)
self.assertEqual(self.github_repos_configs_example,
self.test_manager.github_repos_configs)
except Exception as e:
self.fail("Test failed: {}".format(e))
@freeze_time("2012-01-01")
@mock.patch("src.monitors.starters.create_logger")
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_process_ping_sends_a_valid_hb_if_all_processes_are_alive(
self, mock_ack, mock_create_logger) -> None:
# This test creates a queue which receives messages with the same
# routing key as the ones sent by send_heartbeat, and checks that the
# received heartbeat is valid.
mock_create_logger.return_value = self.dummy_logger
mock_ack.return_value = None
try:
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
properties = pika.spec.BasicProperties()
# First send the new configs as the state is empty
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
# Give time for the processes to start
time.sleep(1)
            # Delete the queue first to avoid leftover messages from
            # previous runs.
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
# Initialise
method_hb = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
body = 'ping'
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.assertEqual(0, res.method.message_count)
self.test_manager.rabbitmq.queue_bind(
queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
self.test_manager._process_ping(blocking_channel, method_hb,
properties, body)
            # By re-declaring the queue passively we can get the number of
            # messages in the queue without modifying it.
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=True
)
self.assertEqual(1, res.method.message_count)
# Check that the message received is a valid HB
_, _, body = self.test_manager.rabbitmq.basic_get(
self.test_queue_name)
expected_output = {
'component_name': self.test_manager.name,
'running_processes':
[self.test_manager.config_process_dict['config_id1'][
'component_name'],
self.test_manager.config_process_dict['config_id2'][
'component_name']],
'dead_processes': [],
'timestamp': datetime(2012, 1, 1).timestamp(),
}
self.assertEqual(expected_output, json.loads(body))
# Clean before test finishes
self.test_manager.config_process_dict['config_id1'][
'process'].terminate()
self.test_manager.config_process_dict['config_id2'][
'process'].terminate()
self.test_manager.config_process_dict['config_id1'][
'process'].join()
self.test_manager.config_process_dict['config_id2'][
'process'].join()
except Exception as e:
self.fail("Test failed: {}".format(e))
@freeze_time("2012-01-01")
@mock.patch("src.monitors.starters.create_logger")
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_process_ping_sends_a_valid_hb_if_some_processes_alive_some_dead(
self, mock_ack, mock_create_logger) -> None:
# This test creates a queue which receives messages with the same
# routing key as the ones sent by send_heartbeat, and checks that the
# received heartbeat is valid.
mock_create_logger.return_value = self.dummy_logger
mock_ack.return_value = None
try:
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
properties = pika.spec.BasicProperties()
# First send the new configs as the state is empty
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
# Give time for the processes to start
time.sleep(1)
self.test_manager.config_process_dict['config_id1'][
'process'].terminate()
self.test_manager.config_process_dict['config_id1'][
'process'].join()
# Give time for the process to stop
time.sleep(1)
            # Delete the queue first to avoid leftover messages from
            # previous runs.
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
# Initialise
method_hb = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
body = 'ping'
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.assertEqual(0, res.method.message_count)
self.test_manager.rabbitmq.queue_bind(
queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
self.test_manager._process_ping(blocking_channel, method_hb,
properties, body)
            # By re-declaring the queue passively we can get the number of
            # messages in the queue without modifying it.
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=True
)
self.assertEqual(1, res.method.message_count)
# Check that the message received is a valid HB
_, _, body = self.test_manager.rabbitmq.basic_get(
self.test_queue_name)
expected_output = {
'component_name': self.test_manager.name,
'running_processes':
[self.test_manager.config_process_dict['config_id2'][
'component_name']],
'dead_processes':
[self.test_manager.config_process_dict['config_id1'][
'component_name']],
'timestamp': datetime(2012, 1, 1).timestamp(),
}
self.assertEqual(expected_output, json.loads(body))
# Clean before test finishes
self.test_manager.config_process_dict['config_id2'][
'process'].terminate()
self.test_manager.config_process_dict['config_id2'][
'process'].join()
except Exception as e:
self.fail("Test failed: {}".format(e))
@freeze_time("2012-01-01")
@mock.patch("src.monitors.starters.create_logger")
@mock.patch.object(RabbitMQApi, "basic_ack")
def test_process_ping_sends_a_valid_hb_if_all_processes_dead(
self, mock_ack, mock_create_logger) -> None:
# This test creates a queue which receives messages with the same
# routing key as the ones sent by send_heartbeat, and checks that the
# received heartbeat is valid.
mock_create_logger.return_value = self.dummy_logger
mock_ack.return_value = None
try:
self.test_manager._initialise_rabbitmq()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
properties = pika.spec.BasicProperties()
# First send the new configs as the state is empty
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
# Give time for the processes to start
time.sleep(1)
self.test_manager.config_process_dict['config_id1'][
'process'].terminate()
self.test_manager.config_process_dict['config_id1'][
'process'].join()
self.test_manager.config_process_dict['config_id2'][
'process'].terminate()
self.test_manager.config_process_dict['config_id2'][
'process'].join()
# Give time for the process to stop
time.sleep(1)
            # Delete the queue first to avoid leftover messages from
            # previous runs.
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
# Initialise
method_hb = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
body = 'ping'
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.assertEqual(0, res.method.message_count)
self.test_manager.rabbitmq.queue_bind(
queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
self.test_manager._process_ping(blocking_channel, method_hb,
properties, body)
            # By re-declaring the queue passively we can get the number of
            # messages in the queue without modifying it.
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=True
)
self.assertEqual(1, res.method.message_count)
# Check that the message received is a valid HB
_, _, body = self.test_manager.rabbitmq.basic_get(
self.test_queue_name)
expected_output = {
'component_name': self.test_manager.name,
'running_processes': [],
'dead_processes':
[self.test_manager.config_process_dict['config_id1'][
'component_name'],
self.test_manager.config_process_dict['config_id2'][
'component_name']],
'timestamp': datetime(2012, 1, 1).timestamp(),
}
self.assertEqual(expected_output, json.loads(body))
except Exception as e:
self.fail("Test failed: {}".format(e))
@freeze_time("2012-01-01")
@mock.patch.object(RabbitMQApi, "basic_ack")
@mock.patch("src.monitors.starters.create_logger")
@mock.patch.object(GitHubMonitorsManager, "_send_heartbeat")
def test_process_ping_restarts_dead_processes(
self, send_hb_mock, mock_create_logger, mock_ack) -> None:
send_hb_mock.return_value = None
mock_create_logger.return_value = self.dummy_logger
mock_ack.return_value = None
try:
self.test_manager.rabbitmq.connect()
blocking_channel = self.test_manager.rabbitmq.channel
method_chains = pika.spec.Basic.Deliver(
routing_key=self.chains_routing_key)
method_general = pika.spec.Basic.Deliver(
routing_key=self.general_routing_key)
body_chain_initial = json.dumps(self.sent_configs_example_chain)
body_general_initial = json.dumps(
self.sent_configs_example_general)
properties = pika.spec.BasicProperties()
# First send the new configs as the state is empty
self.test_manager._process_configs(blocking_channel, method_chains,
properties, body_chain_initial)
self.test_manager._process_configs(blocking_channel, method_general,
properties, body_general_initial)
# Give time for the processes to start
time.sleep(1)
# Automate the case when having all processes dead
self.test_manager.config_process_dict['config_id1'][
'process'].terminate()
self.test_manager.config_process_dict['config_id1'][
'process'].join()
self.test_manager.config_process_dict['config_id2'][
'process'].terminate()
self.test_manager.config_process_dict['config_id2'][
'process'].join()
# Give time for the processes to terminate
time.sleep(1)
            # Check that the processes have terminated
self.assertFalse(self.test_manager.config_process_dict[
'config_id1']['process'].is_alive())
self.assertFalse(self.test_manager.config_process_dict[
'config_id2']['process'].is_alive())
# Initialise
method_hb = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
body = 'ping'
self.test_manager._process_ping(blocking_channel, method_hb,
properties, body)
# Give time for the processes to start
time.sleep(1)
self.assertTrue(self.test_manager.config_process_dict['config_id1'][
'process'].is_alive())
self.assertTrue(self.test_manager.config_process_dict['config_id2'][
'process'].is_alive())
# Clean before test finishes
self.test_manager.config_process_dict['config_id1'][
'process'].terminate()
self.test_manager.config_process_dict['config_id1'][
'process'].join()
self.test_manager.config_process_dict['config_id2'][
'process'].terminate()
self.test_manager.config_process_dict['config_id2'][
'process'].join()
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(GitHubMonitorsManager, "_send_heartbeat")
@mock.patch.object(GitHubMonitorsManager,
"_create_and_start_monitor_process")
@mock.patch.object(multiprocessing.Process, "join")
@mock.patch.object(multiprocessing.Process, "is_alive")
def test_process_ping_restarts_dead_processes_with_correct_info(
self, mock_alive, mock_join, startup_mock, send_hb_mock) -> None:
send_hb_mock.return_value = None
startup_mock.return_value = None
mock_alive.return_value = False
mock_join.return_value = None
try:
self.test_manager.rabbitmq.connect()
del self.github_repos_configs_example['general']
del self.config_process_dict_example['config_id2']
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
# Initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.test_manager._process_ping(blocking_channel, method,
properties, body)
self.assertEqual(1, startup_mock.call_count)
args, _ = startup_mock.call_args
            self.assertTrue('config_id1' in args and
                            'Substrate Polkadot' in args)
self.assertEqual(self.github_repos_configs_example[
'Substrate Polkadot']['config_id1']['id'],
args[0].repo_id)
self.assertEqual(self.github_repos_configs_example[
'Substrate Polkadot']['config_id1'][
'parent_id'],
args[0].parent_id)
self.assertEqual(self.github_repos_configs_example[
'Substrate Polkadot']['config_id1'][
'repo_name'] + '/',
args[0].repo_name)
self.assertEqual(
str_to_bool(self.github_repos_configs_example[
'Substrate Polkadot']['config_id1'][
'monitor_repo']),
args[0].monitor_repo)
self.assertEqual(env.GITHUB_RELEASES_TEMPLATE.format(
self.github_repos_configs_example[
'Substrate Polkadot']['config_id1']['repo_name'] + '/'),
args[0].releases_page)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(multiprocessing.Process, "is_alive")
@mock.patch.object(multiprocessing.Process, "start")
@mock.patch.object(multiprocessing, 'Process')
def test_process_ping_does_not_send_hb_if_processing_fails(
self, mock_process, mock_start, is_alive_mock) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that no
        # heartbeat is sent when a mocked exception is raised.
is_alive_mock.side_effect = self.test_exception
mock_start.return_value = None
mock_process.side_effect = self.dummy_process1
try:
self.test_manager._initialise_rabbitmq()
            # Delete the queue first to avoid leftover messages from
            # previous runs.
self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
self.test_manager._github_repos_configs = \
self.github_repos_configs_example
self.test_manager._config_process_dict = \
self.config_process_dict_example
# Initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=False
)
self.assertEqual(0, res.method.message_count)
self.test_manager.rabbitmq.queue_bind(
queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
self.test_manager._process_ping(blocking_channel, method,
properties, body)
            # By re-declaring the queue passively we can get the number of
            # messages in the queue without modifying it.
res = self.test_manager.rabbitmq.queue_declare(
queue=self.test_queue_name, durable=True, exclusive=False,
auto_delete=False, passive=True
)
self.assertEqual(0, res.method.message_count)
except Exception as e:
self.fail("Test failed: {}".format(e))
def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
self) -> None:
try:
self.test_manager._initialise_rabbitmq()
# Initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.test_manager._process_ping(blocking_channel, method,
properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(GitHubMonitorsManager, "_send_heartbeat")
def test_process_ping_send_hb_raises_amqp_connection_err_on_connection_err(
self, hb_mock) -> None:
hb_mock.side_effect = pika.exceptions.AMQPConnectionError('test')
try:
self.test_manager._initialise_rabbitmq()
# Initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.assertRaises(pika.exceptions.AMQPConnectionError,
self.test_manager._process_ping, blocking_channel,
method, properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(GitHubMonitorsManager, "_send_heartbeat")
def test_process_ping_send_hb_raises_amqp_chan_err_on_chan_err(
self, hb_mock) -> None:
hb_mock.side_effect = pika.exceptions.AMQPChannelError('test')
try:
self.test_manager._initialise_rabbitmq()
# Initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.assertRaises(pika.exceptions.AMQPChannelError,
self.test_manager._process_ping, blocking_channel,
method, properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
@mock.patch.object(GitHubMonitorsManager, "_send_heartbeat")
def test_process_ping_send_hb_raises_exception_on_unexpected_exception(
self, hb_mock) -> None:
hb_mock.side_effect = self.test_exception
try:
self.test_manager._initialise_rabbitmq()
# Initialise
blocking_channel = self.test_manager.rabbitmq.channel
method = pika.spec.Basic.Deliver(
routing_key=HEARTBEAT_OUTPUT_MANAGER_ROUTING_KEY)
properties = pika.spec.BasicProperties()
body = 'ping'
self.assertRaises(PANICException, self.test_manager._process_ping,
blocking_channel, method, properties, body)
except Exception as e:
self.fail("Test failed: {}".format(e))
| 47.220749 | 80 | 0.596805 | 8,767 | 81,928 | 5.231778 | 0.046538 | 0.04936 | 0.079796 | 0.04162 | 0.870975 | 0.851418 | 0.828591 | 0.807901 | 0.800728 | 0.788214 | 0 | 0.006771 | 0.324004 | 81,928 | 1,734 | 81 | 47.247982 | 0.821407 | 0.09617 | 0 | 0.761268 | 0 | 0 | 0.083075 | 0.00827 | 0 | 0 | 0 | 0 | 0.090141 | 1 | 0.022535 | false | 0.007746 | 0.015493 | 0 | 0.038732 | 0.000704 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0d85fc52a94a6298aebc8f70bd07fc702993fdaa | 649 | py | Python | lists_name/list_name_row.py | Skrad20/UI_lab | 95cd3aeaecf29d06cbe0a13803650e2bb23df508 | ["BSD-3-Clause"] | null | null | null | lists_name/list_name_row.py | Skrad20/UI_lab | 95cd3aeaecf29d06cbe0a13803650e2bb23df508 | ["BSD-3-Clause"] | null | null | null | lists_name/list_name_row.py | Skrad20/UI_lab | 95cd3aeaecf29d06cbe0a13803650e2bb23df508 | ["BSD-3-Clause"] | null | null | null |
list_name_row_add_father = [
    'Имя',  # "Name"
    'Инвертарный номер',  # "Inventory number" (misspelled in source; standard Russian is 'Инвентарный')
    'Хозяйство',  # "Farm"
'BM1818',
'BM1824',
'BM2113',
'CSRM60',
'CSSM66',
'CYP21',
'ETH10',
'ETH225',
'ETH3',
'ILSTS6',
'INRA023',
'RM067',
'SPS115',
'TGLA122',
'TGLA126',
'TGLA227',
'TGLA53',
'MGTG4B',
'SPS113',
]
list_name_row_search_father = [
'BM1818',
'BM1824',
'BM2113',
'CSRM60',
'CSSM66',
'CYP21',
'ETH10',
'ETH225',
'ETH3',
'ILSTS6',
'INRA023',
'RM067',
'SPS115',
'TGLA122',
'TGLA126',
'TGLA227',
'TGLA53',
'MGTG4B',
'SPS113',
]
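# A small observation on the two lists above (a sketch, not part of the
# original module): list_name_row_search_father is exactly
# list_name_row_add_father without its first three non-marker columns, so it
# could be derived instead of duplicated:
#     list_name_row_search_father = list_name_row_add_father[3:]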
| 13.808511 | 31 | 0.469954 | 52 | 649 | 5.711538 | 0.557692 | 0.053872 | 0.074074 | 0.161616 | 0.760943 | 0.760943 | 0.760943 | 0.760943 | 0.760943 | 0.760943 | 0 | 0.225806 | 0.331279 | 649 | 46 | 32 | 14.108696 | 0.458525 | 0 | 0 | 0.844444 | 0 | 0 | 0.392912 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
0db18ef822bd6e08373699d087d2f10286e9e618 | 453 | py | Python | day01/__init__.py | T-101/aoc17 | 23bd338c89c640c5a43e1b3a41e22aed12a9c796 | ["MIT"] | null | null | null | day01/__init__.py | T-101/aoc17 | 23bd338c89c640c5a43e1b3a41e22aed12a9c796 | ["MIT"] | null | null | null | day01/__init__.py | T-101/aoc17 | 23bd338c89c640c5a43e1b3a41e22aed12a9c796 | ["MIT"] | null | null | null |
def day01_1(input_data):
    # AoC 2017 day 1, part 1: sum every digit that equals the next digit,
    # treating the sequence as circular.
    result = 0
    for i in range(len(input_data)):
        if int(input_data[i]) == int(input_data[(i + 1) % len(input_data)]):
            result += int(input_data[i])
    return result
def day01_2(input_data):
    # Part 2: sum every digit that equals the digit halfway around the
    # circular sequence.
    result = 0
    for i in range(len(input_data)):
        if int(input_data[i]) == int(input_data[(i + len(input_data) // 2) % len(input_data)]):
            result += int(input_data[i])
    return result
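# A quick usage sketch (hand-checked against the AoC 2017 day 1 examples;
# input_data is assumed to be a string of digits):
#     day01_1("1122")      # -> 3  (1 matches the next 1, 2 matches the next 2)
#     day01_1("91212129")  # -> 9  (only the final 9 matches, circularly)
#     day01_2("1212")      # -> 6  (every digit matches the one halfway around)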
| 26.647059 | 99 | 0.596026 | 72 | 453 | 3.541667 | 0.222222 | 0.458824 | 0.282353 | 0.305882 | 0.87451 | 0.862745 | 0.862745 | 0.862745 | 0.862745 | 0.862745 | 0 | 0.029499 | 0.251656 | 453 | 16 | 100 | 28.3125 | 0.722714 | 0 | 0 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
2175823ce49c9e2e30390805b568c52388a32fb8 | 11,681 | py | Python | tests/test_early_init.py | sm0k/tortoise-orm | a3f0ff621a449362f11461d670665afc2f20d31a | ["Apache-2.0"] | null | null | null | tests/test_early_init.py | sm0k/tortoise-orm | a3f0ff621a449362f11461d670665afc2f20d31a | ["Apache-2.0"] | null | null | null | tests/test_early_init.py | sm0k/tortoise-orm | a3f0ff621a449362f11461d670665afc2f20d31a | ["Apache-2.0"] | null | null | null |
from tortoise import Tortoise, fields
from tortoise.contrib import test
from tortoise.contrib.pydantic import pydantic_model_creator
from tortoise.models import Model
class Tournament(Model):
id = fields.IntField(pk=True)
name = fields.CharField(max_length=100)
created_at = fields.DatetimeField(auto_now_add=True)
events: fields.ReverseRelation["Event"]
class Meta:
ordering = ["name"]
class Event(Model):
"""
The Event model docstring.
This is multiline docs.
"""
id = fields.IntField(pk=True)
#: The Event NAME
#: It's pretty important
name = fields.CharField(max_length=255)
created_at = fields.DatetimeField(auto_now_add=True)
tournament: fields.ForeignKeyNullableRelation[Tournament] = fields.ForeignKeyField(
"models.Tournament", related_name="events", null=True
)
class Meta:
ordering = ["name"]
class TestBasic(test.TestCase):
def test_early_init(self):
self.maxDiff = None
Event_TooEarly = pydantic_model_creator(Event)
self.assertEqual(
Event_TooEarly.schema(),
{
"title": "Event",
"type": "object",
"description": "The Event model docstring.<br/><br/>This is multiline docs.",
"properties": {
"id": {"title": "Id", "type": "integer", "maximum": 2147483647, "minimum": 1},
"name": {
"title": "Name",
"type": "string",
"description": "The Event NAME<br/>It's pretty important",
"maxLength": 255,
},
"created_at": {
"title": "Created At",
"type": "string",
"format": "date-time",
"readOnly": True,
},
},
"required": ["id", "name", "created_at"],
"additionalProperties": False,
},
)
self.assertEqual(
Tortoise.describe_model(Event),
{
"name": "None.",
"app": None,
"table": "",
"abstract": False,
"description": "The Event model docstring.",
"docstring": "The Event model docstring.\n\nThis is multiline docs.",
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"db_field_types": {"": "INT"},
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
},
"data_fields": [
{
"name": "name",
"field_type": "CharField",
"db_column": "name",
"db_field_types": {"": "VARCHAR(255)"},
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "The Event NAME",
"docstring": "The Event NAME\nIt's pretty important",
"constraints": {"max_length": 255},
},
{
"name": "created_at",
"field_type": "DatetimeField",
"db_column": "created_at",
"db_field_types": {"": "TIMESTAMP", "mysql": "DATETIME(6)"},
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"readOnly": True},
},
],
"fk_fields": [
{
"name": "tournament",
"field_type": "ForeignKeyFieldInstance",
"raw_field": None,
"python_type": "None",
"generated": False,
"nullable": True,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
}
],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
)
Tortoise.init_models(["tests.test_early_init"], "models")
Event_Pydantic = pydantic_model_creator(Event)
self.assertEqual(
Event_Pydantic.schema(),
{
"title": "Event",
"type": "object",
"description": "The Event model docstring.<br/><br/>This is multiline docs.",
"properties": {
"id": {"title": "Id", "type": "integer", "maximum": 2147483647, "minimum": 1},
"name": {
"title": "Name",
"type": "string",
"description": "The Event NAME<br/>It's pretty important",
"maxLength": 255,
},
"created_at": {
"title": "Created At",
"type": "string",
"format": "date-time",
"readOnly": True,
},
"tournament": {
"title": "Tournament",
"nullable": True,
"allOf": [{"$ref": "#/definitions/Tournament"}],
},
},
"definitions": {
"Tournament": {
"title": "Tournament",
"type": "object",
"properties": {
"id": {
"title": "Id",
"type": "integer",
"maximum": 2147483647,
"minimum": 1,
},
"name": {"title": "Name", "type": "string", "maxLength": 100},
"created_at": {
"title": "Created At",
"type": "string",
"format": "date-time",
"readOnly": True,
},
},
"required": ["id", "name", "created_at"],
"additionalProperties": False,
}
},
"required": ["id", "name", "created_at"],
"additionalProperties": False,
},
)
self.assertEqual(
Tortoise.describe_model(Event),
{
"name": "models.Event",
"app": "models",
"table": "event",
"abstract": False,
"description": "The Event model docstring.",
"docstring": "The Event model docstring.\n\nThis is multiline docs.",
"unique_together": [],
"pk_field": {
"name": "id",
"field_type": "IntField",
"db_column": "id",
"db_field_types": {"": "INT"},
"python_type": "int",
"generated": True,
"nullable": False,
"unique": True,
"indexed": True,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": 1, "le": 2147483647},
},
"data_fields": [
{
"name": "name",
"field_type": "CharField",
"db_column": "name",
"db_field_types": {"": "VARCHAR(255)"},
"python_type": "str",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": "The Event NAME",
"docstring": "The Event NAME\nIt's pretty important",
"constraints": {"max_length": 255},
},
{
"name": "created_at",
"field_type": "DatetimeField",
"db_column": "created_at",
"db_field_types": {"": "TIMESTAMP", "mysql": "DATETIME(6)"},
"python_type": "datetime.datetime",
"generated": False,
"nullable": False,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"readOnly": True},
},
{
"name": "tournament_id",
"field_type": "IntField",
"db_column": "tournament_id",
"db_field_types": {"": "INT"},
"python_type": "int",
"generated": False,
"nullable": True,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {"ge": -2147483648, "le": 2147483647},
},
],
"fk_fields": [
{
"name": "tournament",
"field_type": "ForeignKeyFieldInstance",
"raw_field": "tournament_id",
"python_type": "models.Tournament",
"generated": False,
"nullable": True,
"unique": False,
"indexed": False,
"default": None,
"description": None,
"docstring": None,
"constraints": {},
}
],
"backward_fk_fields": [],
"o2o_fields": [],
"backward_o2o_fields": [],
"m2m_fields": [],
},
)
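# A short takeaway sketch of what the two describe_model() comparisons above
# demonstrate (same calls as in the test; only the ordering matters):
#     Event_TooEarly = pydantic_model_creator(Event)            # relations unresolved
#     Tortoise.init_models(["tests.test_early_init"], "models")
#     Event_Pydantic = pydantic_model_creator(Event)            # Tournament resolved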
| 39.197987 | 98 | 0.362212 | 732 | 11,681 | 5.64071 | 0.161202 | 0.032696 | 0.047954 | 0.037297 | 0.804311 | 0.767498 | 0.760959 | 0.739162 | 0.718334 | 0.681037 | 0 | 0.01939 | 0.51434 | 11,681 | 297 | 99 | 39.329966 | 0.708444 | 0.007876 | 0 | 0.700361 | 0 | 0 | 0.26046 | 0.012016 | 0 | 0 | 0 | 0 | 0.01444 | 1 | 0.00361 | false | 0 | 0.028881 | 0 | 0.079422 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
21abdfee14f7af74c2492d7cab810a18e40ee363 | 6,367 | py | Python | resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtGui/QItemSelection.py | basepipe/developer_onboarding | 05b6a776f8974c89517868131b201f11c6c2a5ad | ["MIT"] | 1 | 2020-04-20T02:27:20.000Z | 2020-04-20T02:27:20.000Z | resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QItemSelection.py | basepipe/developer_onboarding | 05b6a776f8974c89517868131b201f11c6c2a5ad | ["MIT"] | null | null | null | resources/dot_PyCharm/system/python_stubs/cache/8cdc475d469a13122bc4bc6c3ac1c215d93d5f120f5cc1ef33a8f3088ee54d8e/PySide/QtGui/QItemSelection.py | basepipe/developer_onboarding | 05b6a776f8974c89517868131b201f11c6c2a5ad | ["MIT"] | null | null | null |
# encoding: utf-8
# module PySide.QtGui
# from C:\Python27\lib\site-packages\PySide\QtGui.pyd
# by generator 1.147
# no doc
# imports
import PySide.QtCore as __PySide_QtCore
import Shiboken as __Shiboken
class QItemSelection(__Shiboken.Object):
# no doc
def append(self, *args, **kwargs): # real signature unknown
pass
def at(self, *args, **kwargs): # real signature unknown
pass
def back(self, *args, **kwargs): # real signature unknown
pass
def clear(self, *args, **kwargs): # real signature unknown
pass
def contains(self, *args, **kwargs): # real signature unknown
pass
def count(self, *args, **kwargs): # real signature unknown
pass
def detachShared(self, *args, **kwargs): # real signature unknown
pass
def empty(self, *args, **kwargs): # real signature unknown
pass
def endsWith(self, *args, **kwargs): # real signature unknown
pass
def first(self, *args, **kwargs): # real signature unknown
pass
def fromSet(self, *args, **kwargs): # real signature unknown
pass
def fromVector(self, *args, **kwargs): # real signature unknown
pass
def front(self, *args, **kwargs): # real signature unknown
pass
def indexes(self, *args, **kwargs): # real signature unknown
pass
def indexOf(self, *args, **kwargs): # real signature unknown
pass
def insert(self, *args, **kwargs): # real signature unknown
pass
def isEmpty(self, *args, **kwargs): # real signature unknown
pass
def isSharedWith(self, *args, **kwargs): # real signature unknown
pass
def last(self, *args, **kwargs): # real signature unknown
pass
def lastIndexOf(self, *args, **kwargs): # real signature unknown
pass
def length(self, *args, **kwargs): # real signature unknown
pass
def merge(self, *args, **kwargs): # real signature unknown
pass
def mid(self, *args, **kwargs): # real signature unknown
pass
def move(self, *args, **kwargs): # real signature unknown
pass
def pop_back(self, *args, **kwargs): # real signature unknown
pass
def pop_front(self, *args, **kwargs): # real signature unknown
pass
def prepend(self, *args, **kwargs): # real signature unknown
pass
def push_back(self, *args, **kwargs): # real signature unknown
pass
def push_front(self, *args, **kwargs): # real signature unknown
pass
def removeAll(self, *args, **kwargs): # real signature unknown
pass
def removeAt(self, *args, **kwargs): # real signature unknown
pass
def removeFirst(self, *args, **kwargs): # real signature unknown
pass
def removeLast(self, *args, **kwargs): # real signature unknown
pass
def removeOne(self, *args, **kwargs): # real signature unknown
pass
def replace(self, *args, **kwargs): # real signature unknown
pass
def reserve(self, *args, **kwargs): # real signature unknown
pass
def select(self, *args, **kwargs): # real signature unknown
pass
def setSharable(self, *args, **kwargs): # real signature unknown
pass
def size(self, *args, **kwargs): # real signature unknown
pass
def split(self, *args, **kwargs): # real signature unknown
pass
def startsWith(self, *args, **kwargs): # real signature unknown
pass
def swap(self, *args, **kwargs): # real signature unknown
pass
def takeAt(self, *args, **kwargs): # real signature unknown
pass
def takeFirst(self, *args, **kwargs): # real signature unknown
pass
def takeLast(self, *args, **kwargs): # real signature unknown
pass
def toSet(self, *args, **kwargs): # real signature unknown
pass
def toVector(self, *args, **kwargs): # real signature unknown
pass
def value(self, *args, **kwargs): # real signature unknown
pass
def __add__(self, y): # real signature unknown; restored from __doc__
""" x.__add__(y) <==> x+y """
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __delitem__(self, y): # real signature unknown; restored from __doc__
""" x.__delitem__(y) <==> del x[y] """
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __getitem__(self, y): # real signature unknown; restored from __doc__
""" x.__getitem__(y) <==> x[y] """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __iadd__(self, y): # real signature unknown; restored from __doc__
""" x.__iadd__(y) <==> x+=y """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __len__(self): # real signature unknown; restored from __doc__
""" x.__len__() <==> len(x) """
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lshift__(self, y): # real signature unknown; restored from __doc__
""" x.__lshift__(y) <==> x<<y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __radd__(self, y): # real signature unknown; restored from __doc__
""" x.__radd__(y) <==> y+x """
pass
def __rlshift__(self, y): # real signature unknown; restored from __doc__
""" x.__rlshift__(y) <==> y<<x """
pass
def __setitem__(self, i, y): # real signature unknown; restored from __doc__
""" x.__setitem__(i, y) <==> x[i]=y """
pass
| 27.682609 | 80 | 0.597142 | 763 | 6,367 | 4.70118 | 0.150721 | 0.239197 | 0.367996 | 0.250906 | 0.792306 | 0.761639 | 0.751882 | 0.741846 | 0.251464 | 0 | 0 | 0.001517 | 0.275326 | 6,367 | 229 | 81 | 27.803493 | 0.775899 | 0.389665 | 0 | 0.485294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.485294 | false | 0.485294 | 0.014706 | 0 | 0.507353 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
df1e9411a71505cbb183a8cd66da9d55d314769c | 19,510 | py | Python | test/nn/test_geometrictensor.py | steven-lang/e2cnn | 48f49760766ec958b52d0dd7b02483886dfa2096 | ["BSD-3-Clause"] | 3 | 2021-04-24T08:40:28.000Z | 2021-12-31T08:08:54.000Z | test/nn/test_geometrictensor.py | steven-lang/e2cnn | 48f49760766ec958b52d0dd7b02483886dfa2096 | ["BSD-3-Clause"] | null | null | null | test/nn/test_geometrictensor.py | steven-lang/e2cnn | 48f49760766ec958b52d0dd7b02483886dfa2096 | ["BSD-3-Clause"] | null | null | null |
import unittest
from unittest import TestCase
from e2cnn.nn import *
from e2cnn.gspaces import *
import torch
import random
class TestGeometricTensor(TestCase):
def test_sum(self):
for N in [2, 4, 7, 16]:
gs = Rot2dOnR2(N)
for irr in gs.irreps.values():
type = FieldType(gs, [irr] * 3)
for i in range(3):
t1 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
t2 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
out1 = t1.tensor + t2.tensor
out2 = (t1 + t2).tensor
out3 = (t2 + t1).tensor
self.assertTrue(torch.allclose(out1, out2))
self.assertTrue(torch.allclose(out3, out2))
def test_isum(self):
for N in [2, 4, 7, 16]:
gs = Rot2dOnR2(N)
for irr in gs.irreps.values():
type = FieldType(gs, [irr] * 3)
for i in range(5):
t1 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
t2 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
out1 = t1.tensor + t2.tensor
t1 += t2
out2 = t1.tensor
self.assertTrue(torch.allclose(out1, out2))
def test_sub(self):
for N in [2, 4, 7, 16]:
gs = Rot2dOnR2(N)
for irr in gs.irreps.values():
                type = FieldType(gs, [irr] * 3)
for i in range(3):
t1 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
t2 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
out1 = t1.tensor - t2.tensor
out2 = (t1 - t2).tensor
self.assertTrue(torch.allclose(out1, out2))
def test_isub(self):
for N in [2, 4, 7, 16]:
gs = Rot2dOnR2(N)
for irr in gs.irreps.values():
type = FieldType(gs, [irr] * 3)
for i in range(5):
t1 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
t2 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
out1 = t1.tensor - t2.tensor
t1 -= t2
out2 = t1.tensor
self.assertTrue(torch.allclose(out1, out2))
def test_mul(self):
for N in [2, 4, 7, 16]:
gs = Rot2dOnR2(N)
for irr in gs.irreps.values():
type = FieldType(gs, [irr] * 3)
for i in range(3):
t1 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
s = 10*torch.randn(1)
out1 = t1.tensor * s
out2 = (s * t1).tensor
out3 = (t1 * s).tensor
self.assertTrue(torch.allclose(out1, out2))
self.assertTrue(torch.allclose(out3, out2))
def test_imul(self):
for N in [2, 4, 7, 16]:
gs = Rot2dOnR2(N)
for irr in gs.irreps.values():
type = FieldType(gs, [irr] * 3)
for i in range(5):
t1 = GeometricTensor(torch.randn(10, type.size, 11, 11), type)
s = 10*torch.randn(1)
out1 = t1.tensor * s
t1 *= s
out2 = t1.tensor
self.assertTrue(torch.allclose(out1, out2))
def test_slicing(self):
for N in [2, 4, 7, 16]:
gs = FlipRot2dOnR2(N)
for irr in gs.irreps.values():
# with multiple fields
F = 7
type = FieldType(gs, [irr] * F)
for i in range(3):
t = torch.randn(10, type.size, 11, 11)
gt = GeometricTensor(t, type)
# slice all dims except the channels
self.assertTrue(torch.allclose(
t[2:3, :, 2:7, 2:7],
gt[2:3, :, 2:7, 2:7].tensor,
))
# slice only spatial dims
self.assertTrue(torch.allclose(
t[:, :, 2:7, 2:7],
gt[:, :, 2:7, 2:7].tensor,
))
self.assertTrue(torch.allclose(
t[:, :, 2:7, 2:7],
gt[..., 2:7, 2:7].tensor,
))
# slice only 1 spatial
self.assertTrue(torch.allclose(
t[..., 2:7],
gt[..., 2:7].tensor,
))
# slice only batch
self.assertTrue(torch.allclose(
t[2:4],
gt[2:4, ...].tensor,
))
self.assertTrue(torch.allclose(
t[2:4],
gt[2:4].tensor,
))
# different ranges
self.assertTrue(torch.allclose(
t[:, :, 1:9:2, 0:8:3],
gt[..., 1:9:2, 0:8:3].tensor,
))
# no slicing
self.assertTrue(torch.allclose(
t,
gt[:].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :, :, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[...].tensor,
))
# slice channels with all fields of same type
self.assertTrue(torch.allclose(
t[:, 1 * irr.size:4 * irr.size:],
gt[:, 1:4, ...].tensor,
))
# slice cover all channels
self.assertTrue(torch.allclose(
t,
gt[:, 0:7, ...].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, 0:7:1, ...].tensor,
))
# with a larger step
start = 1
end = 6
step = 2
self.assertTrue(torch.allclose(
t[:,
[f * irr.size + i for f in range(start, end, step) for i in range(irr.size)]
],
gt[:, start:end:step, ...].tensor,
))
start = 0
end = 7
step = 3
self.assertTrue(torch.allclose(
t[:,
[f * irr.size + i for f in range(start, end, step) for i in range(irr.size)]
],
gt[:, start:end:step, ...].tensor,
))
# with negative step
start = 6
end = 1
step = -1
self.assertTrue(torch.allclose(
t[:,
[f * irr.size + i for f in range(start, end, step) for i in range(irr.size)]
],
gt[:, start:end:step, ...].tensor,
))
start = 6
end = 1
step = -2
self.assertTrue(torch.allclose(
t[:,
[f * irr.size + i for f in range(start, end, step) for i in range(irr.size)]
],
gt[:, start:end:step, ...].tensor,
))
# 1 single field
start = 1
end = 2
step = 1
self.assertTrue(torch.allclose(
t[:,
[f * irr.size + i for f in range(start, end, step) for i in range(irr.size)]
],
gt[:, start:end:step, ...].tensor,
))
# index only one field
f = 2
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for i in range(irr.size)]
],
gt[:, f:f+1, ...].tensor,
))
# single index
f = 2
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for i in range(irr.size)]
],
gt[:, f, ...].tensor,
))
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for i in range(irr.size)]
],
gt[:, f].tensor,
))
self.assertTrue(torch.allclose(
t[1:2],
gt[1, ...].tensor,
))
self.assertTrue(torch.allclose(
t[..., 3:4],
gt[..., 3].tensor,
))
self.assertTrue(torch.allclose(
t[..., 2:3, 3:4],
gt[..., 2, 3].tensor,
))
self.assertTrue(torch.allclose(
t[3:4, ..., 2:3, 3:4],
gt[3, ..., 2, 3].tensor,
))
self.assertTrue(torch.allclose(
t[1:2, :irr.size],
gt[1, 0, ...].tensor,
))
self.assertTrue(torch.allclose(
t[1:2, :irr.size, 4:5, 2:3],
gt[1, 0, 4, 2].tensor,
))
# raise errors
with self.assertRaises(TypeError):
sliced = gt[2:5, 0:4, 1:7, 1:7, ...]
with self.assertRaises(TypeError):
sliced = gt[[2, 4, 2], 0:4, ...]
with self.assertRaises(TypeError):
sliced = gt[2, 0:4, range(3), range(3)]
# with a single field
F = 1
type = FieldType(gs, [irr] * F)
for i in range(3):
t = torch.randn(10, type.size, 11, 11)
gt = GeometricTensor(t, type)
# slice all dims except the channels
self.assertTrue(torch.allclose(
t[2:3, :, 2:7, 2:7],
gt[2:3, :, 2:7, 2:7].tensor,
))
# slice only spatial dims
self.assertTrue(torch.allclose(
t[:, :, 2:7, 2:7],
gt[:, :, 2:7, 2:7].tensor,
))
self.assertTrue(torch.allclose(
t[:, :, 2:7, 2:7],
gt[..., 2:7, 2:7].tensor,
))
# slice only 1 spatial
self.assertTrue(torch.allclose(
t[..., 2:7],
gt[..., 2:7].tensor,
))
# slice only batch
self.assertTrue(torch.allclose(
t[2:4],
gt[2:4, ...].tensor,
))
self.assertTrue(torch.allclose(
t[2:4],
gt[2:4].tensor,
))
# different ranges
self.assertTrue(torch.allclose(
t[:, :, 1:9:2, 0:8:3],
gt[..., 1:9:2, 0:8:3].tensor,
))
# no slicing
self.assertTrue(torch.allclose(
t,
gt[:].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :, :, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[...].tensor,
))
# 1 single field
self.assertTrue(torch.allclose(
t,
gt[:, 0:1, ...].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, 0, ...].tensor,
))
# negative index
self.assertTrue(torch.allclose(
t,
gt[:, -1, ...].tensor,
))
# with negative step
start = 0
end = -2
step = -1
self.assertTrue(torch.allclose(
t,
gt[:, start:end:step, ...].tensor,
))
for i in range(3):
reprs = list(gs.representations.values())*3
random.shuffle(reprs)
type = FieldType(gs, reprs)
F = len(type)
t = torch.randn(3, type.size, 3, 4)
gt = GeometricTensor(t, type)
# assignment should not be allowed
with self.assertRaises(TypeError):
gt[2, 1:3, ...] = torch.randn(gt[2, 1:3, ...].shape)
# no slicing
self.assertTrue(torch.allclose(
t,
gt[:].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :, :, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, :].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[...].tensor,
))
# slice channels with all fields of different types
self.assertTrue(torch.allclose(
t[:, type.fields_start[1]:type.fields_end[3]:],
gt[:, 1:4, ...].tensor,
))
# slice cover all channels
self.assertTrue(torch.allclose(
t,
gt[:, 0:F, ...].tensor,
))
self.assertTrue(torch.allclose(
t,
gt[:, 0:F:1, ...].tensor,
))
# with a larger step
start = 1
end = 6
step = 2
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for f in range(start, end, step) for i in range(type.representations[f].size)]
],
gt[:, start:end:step, ...].tensor,
))
start = 0
end = 7
step = 3
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for f in range(start, end, step) for i in range(type.representations[f].size)]
],
gt[:, start:end:step, ...].tensor,
))
# with negative step
start = 6
end = 1
step = -1
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for f in range(start, end, step) for i in range(type.representations[f].size)]
],
gt[:, start:end:step, ...].tensor,
))
start = 6
end = 1
step = -2
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for f in range(start, end, step) for i in range(type.representations[f].size)]
],
gt[:, start:end:step, ...].tensor,
))
# single index
for f in range(F):
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for i in range(type.representations[f].size)]
],
gt[:, f, ...].tensor,
))
self.assertTrue(torch.allclose(
t[:,
[type.fields_start[f] + i for i in range(type.representations[f].size)]
],
gt[:, f].tensor,
))
self.assertTrue(torch.allclose(
t[1:2,
[type.fields_start[f] + i for i in range(type.representations[f].size)]
],
gt[1, f, ...].tensor,
))
self.assertTrue(torch.allclose(
t[
1:2,
[type.fields_start[f] + i for i in range(type.representations[f].size)],
3:4,
4:5
],
gt[1, f, 3, 4].tensor,
))
if __name__ == '__main__':
unittest.main()
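# A reading aid for the slicing assertions above: gt[:, f] selects the
# channels of field f. A hypothetical helper (names taken from the test
# itself, not e2cnn's documented API) computing that channel range:
#     def field_channels(field_type, f):
#         start = field_type.fields_start[f]
#         return range(start, start + field_type.representations[f].size)
# so gt[:, f].tensor should equal t[:, list(field_channels(type, f))].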
| 36.062847 | 124 | 0.328447 | 1,668 | 19,510 | 3.82494 | 0.065947 | 0.151411 | 0.205486 | 0.292006 | 0.887774 | 0.877273 | 0.867712 | 0.835266 | 0.798276 | 0.782602 | 0 | 0.052619 | 0.563608 | 19,510 | 540 | 125 | 36.12963 | 0.696735 | 0.034751 | 0 | 0.834515 | 0 | 0 | 0.000426 | 0 | 0 | 0 | 0 | 0 | 0.172577 | 1 | 0.016548 | false | 0 | 0.014184 | 0 | 0.033097 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
10d6de528bf399048accac65a23760ab25d9ba0a | 10,704 | py | Python | tests/lib_test.py | item4/ugoira | 3d5a1f8f82cf351b2c35349227bf5eb1129d2ae9 | ["MIT"] | 11 | 2018-04-21T08:04:11.000Z | 2021-02-14T17:09:33.000Z | tests/lib_test.py | item4/ugoira | 3d5a1f8f82cf351b2c35349227bf5eb1129d2ae9 | ["MIT"] | 16 | 2018-09-18T11:10:12.000Z | 2021-02-14T16:47:29.000Z | tests/lib_test.py | item4/ugoira | 3d5a1f8f82cf351b2c35349227bf5eb1129d2ae9 | ["MIT"] | 3 | 2018-09-18T10:50:45.000Z | 2021-01-31T18:01:33.000Z |
import mimetypes
import zipfile
from PIL import Image
from apng import APNG
import pytest
import responses
from ugoira.lib import (
PixivError,
download_ugoira_zip,
get_metadata_url,
make_apng,
make_via_pillow,
make_zip,
)
def test_download_ugoira_wrong_illust_id(
ugoira_id,
error_meta_body,
):
"""Test :func:`ugoira.lib.download_ugoira_zip` with wrong illust-id.
It must raise :class:`ugoira.lib.PixivError`.
"""
@responses.activate
def test():
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': error_meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
with pytest.raises(PixivError):
download_ugoira_zip(ugoira_id)
test()
def test_download_ugoira_zip_fail_head(
ugoira_id,
meta_body,
small_zip_url,
big_zip_url,
):
"""Test :func:`ugoira.lib.download_ugoira_zip` with broken link.
It must raise :class:`ugoira.lib.PixivError`.
"""
@responses.activate
def case1():
"""
original head - bad
original get - not reached
common head - bad
common get - not reached
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 403,
})
responses.add(**{
'method': responses.HEAD,
'url': small_zip_url,
'status': 403,
})
with pytest.raises(PixivError):
download_ugoira_zip(ugoira_id)
@responses.activate
def case2():
"""
original head - good
original get - bad
common head - bad
common get - not reached
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': big_zip_url,
'status': 403,
})
responses.add(**{
'method': responses.HEAD,
'url': small_zip_url,
'status': 403,
})
with pytest.raises(PixivError):
download_ugoira_zip(ugoira_id)
case1()
case2()
def test_download_ugoira_zip_fail_get(
ugoira_id,
meta_body,
small_zip_url,
big_zip_url,
):
"""Test :func:`ugoira.lib.download_ugoira_zip` with broken link.
It must raise :class:`ugoira.lib.PixivError`.
"""
@responses.activate
def case1():
"""
original head - bad
original get - not reached
common head - good
common get - bad
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 403,
})
responses.add(**{
'method': responses.HEAD,
'url': small_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': small_zip_url,
'status': 403,
})
with pytest.raises(PixivError):
download_ugoira_zip(ugoira_id)
@responses.activate
def case2():
"""
original head - good
original get - bad
common head - good
common get - bad
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': big_zip_url,
'status': 403,
})
responses.add(**{
'method': responses.HEAD,
'url': small_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': small_zip_url,
'status': 403,
})
with pytest.raises(PixivError):
download_ugoira_zip(ugoira_id)
case1()
case2()
def test_download_ugoira_zip_success(
ugoira_id,
meta_body,
small_zip_url,
big_zip_url,
small_image_zip,
big_image_zip,
):
"""Test :func:`ugoira.lib.download_ugoira_zip` with correct link."""
@responses.activate
def case1():
"""
original head - good
original get - good
common head - not reached
common get - not reached
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': big_zip_url,
'body': big_image_zip,
'content_type': 'application/zip',
'status': 200,
})
data, frames = download_ugoira_zip(ugoira_id)
assert data == big_image_zip
@responses.activate
def case2():
"""
original head - good
original get - bad
common head - good
common get - good
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': big_zip_url,
'status': 403,
})
responses.add(**{
'method': responses.HEAD,
'url': small_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': small_zip_url,
'body': small_image_zip,
'content_type': 'application/zip',
'status': 200,
})
data, frames = download_ugoira_zip(ugoira_id)
assert data == small_image_zip
@responses.activate
def case3():
"""
original head - bad
original get - not reached
common head - good
common get - good
"""
responses.reset()
responses.add(**{
'method': responses.GET,
'url': get_metadata_url(ugoira_id),
'body': meta_body,
'content_type': 'application/json',
'status': 200,
'match_querystring': True,
})
responses.add(**{
'method': responses.HEAD,
'url': big_zip_url,
'status': 403,
})
responses.add(**{
'method': responses.HEAD,
'url': small_zip_url,
'status': 200,
})
responses.add(**{
'method': responses.GET,
'url': small_zip_url,
'body': small_image_zip,
'content_type': 'application/zip',
'status': 200,
})
data, frames = download_ugoira_zip(ugoira_id)
assert data == small_image_zip
case1()
case2()
case3()
def test_make_apng(
fx_tmpdir,
small_image_zip,
frames,
):
"""Test :func:`ugoira.lib.make_apng`."""
dest = str(fx_tmpdir / 'test.apng')
make_apng(dest, small_image_zip, frames)
img = APNG.open(dest)
assert len(img.frames) == 3
assert img.frames[0][1].delay == 1000
assert img.frames[1][1].delay == 2000
assert img.frames[2][1].delay == 3000
def test_make_gif(
fx_tmpdir,
small_image_zip,
frames,
):
"""Test :func:`ugoira.lib.make_gif`."""
dest = str(fx_tmpdir / 'test.gif')
make_via_pillow(dest, small_image_zip, frames, 1.0, 'gif')
im = Image.open(dest)
assert im.format == 'GIF'
assert im.info['duration'] == 1000 / 10
assert im.info['loop'] == 0
im.seek(im.tell() + 1)
assert im.format == 'GIF'
assert im.info['duration'] == 2000 / 10
assert im.info['loop'] == 0
im.seek(im.tell() + 1)
assert im.format == 'GIF'
assert im.info['duration'] == 3000 / 10
assert im.info['loop'] == 0
with pytest.raises(EOFError):
assert im.seek(im.tell() + 1)
def test_make_webp(
fx_tmpdir,
small_image_zip,
frames,
):
"""Test :func:`ugoira.lib.make_gif`."""
dest = str(fx_tmpdir / 'test.webp')
make_via_pillow(dest, small_image_zip, frames, 1.0, 'webp')
im = Image.open(dest)
assert im.format == 'WEBP'
assert im.info['loop'] == 0
def test_make_pdf(
fx_tmpdir,
small_image_zip,
frames,
):
"""Test :func:`ugoira.lib.make_gif`."""
dest = str(fx_tmpdir / 'test.pdf')
make_via_pillow(dest, small_image_zip, frames, 1.0, 'pdf')
assert mimetypes.guess_type(dest, strict=True) == ('application/pdf', None)
def test_make_zip(
fx_tmpdir,
small_image_zip,
frames,
):
"""Test :func:`ugoira.lib.make_zip` with correct link."""
dest = str(fx_tmpdir / 'test.zip')
make_zip(dest, small_image_zip)
with zipfile.ZipFile(dest) as f:
assert set(f.namelist()) == set(frames.keys())
| 24.217195
| 79
| 0.523262
| 1,126
| 10,704
| 4.757549
| 0.092362
| 0.064962
| 0.097443
| 0.146164
| 0.853276
| 0.828262
| 0.811835
| 0.803621
| 0.782528
| 0.773007
| 0
| 0.021602
| 0.346973
| 10,704
| 441
| 80
| 24.272109
| 0.744778
| 0.109025
| 0
| 0.825545
| 0
| 0
| 0.113885
| 0
| 0
| 0
| 0
| 0
| 0.065421
| 1
| 0.05296
| false
| 0
| 0.021807
| 0
| 0.074766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
10d7ca5bb70663e1ba3dadefc05fcd61e81fb161
| 127
|
py
|
Python
|
python/10_classes/1_classesdealingwithcomplexnumbers.py
|
jaimiles23/hacker_rank
|
0580eac82e5d0989afabb5c2e66faf09713f891b
|
[
"Apache-2.0"
] | null | null | null |
python/10_classes/1_classesdealingwithcomplexnumbers.py
|
jaimiles23/hacker_rank
|
0580eac82e5d0989afabb5c2e66faf09713f891b
|
[
"Apache-2.0"
] | null | null | null |
python/10_classes/1_classesdealingwithcomplexnumbers.py
|
jaimiles23/hacker_rank
|
0580eac82e5d0989afabb5c2e66faf09713f891b
|
[
"Apache-2.0"
] | 3
|
2021-09-22T11:06:58.000Z
|
2022-01-25T09:29:24.000Z
|
Solution to [Classes: Dealing with Complex Numbers](https://www.hackerrank.com/challenges/class-1-dealing-with-complex-numbers)
| 127
| 127
| 0.818898
| 18
| 127
| 5.777778
| 0.777778
| 0.211538
| 0.346154
| 0.480769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.047244
| 127
| 1
| 127
| 127
| 0.85124
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80032fb4f4cf405ea7b37477ef43697af4ca1ed4
| 2,303
|
py
|
Python
|
gg_gui/utilGui/names.py
|
ealcobaca/optimizer_pool
|
e93ac72c1547bc3813a0edf822d5fd453f22ce49
|
[
"MIT"
] | 1
|
2022-03-10T21:46:07.000Z
|
2022-03-10T21:46:07.000Z
|
gg_gui/utilGui/names.py
|
ealcobaca/optimizer_pool
|
e93ac72c1547bc3813a0edf822d5fd453f22ce49
|
[
"MIT"
] | null | null | null |
gg_gui/utilGui/names.py
|
ealcobaca/optimizer_pool
|
e93ac72c1547bc3813a0edf822d5fd453f22ce49
|
[
"MIT"
] | 1
|
2022-03-10T21:46:09.000Z
|
2022-03-10T21:46:09.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class Names():
Chemical_Elemnts = ["Yb", "Pb", "Ca", "Ti", "Mo", "Sn", "Cd", "Ag", "La",
"Cs", "W", "Sb", "Ta", "V", "Fe", "Bi", "Ce", "Nb",
"Cu", "I", "B", "Te", "Al", "Zr", "Gd", "Na", "Ga",
"Cl", "S", "Si", "O", "F", "Mn", "Ba", "K", "Zn",
"N", "Li", "Ge", "Y", "Sr", "P", "Mg", "Er", "As"]
'''
Chemical_Compounds = ['Ag2O', 'Al2O3', 'As2O3', 'As2O5', 'B2O3', 'BaO',
'Bi2O3', 'CaO', 'CdO', 'Ce2O3', 'CeO2', 'Cl', 'Cs2O',
'Cu2O', 'CuO', 'Er2O3', 'F', 'Fe2O3', 'Fe3O4', 'FeO',
'Ga2O3', 'Gd2O3', 'GeO', 'GeO2', 'I', 'K2O', 'La2O3',
'Li2O', 'MgO', 'Mn2O3', 'Mn2O7', 'Mn3O4', 'MnO',
'MnO2', 'Mo2O3', 'Mo2O5', 'MoO', 'MoO2', 'MoO3',
'N', 'N2O5', 'NO2', 'Na2O', 'Nb2O3', 'Nb2O5',
'P2O3', 'P2O5', 'Pb3O4', 'PbO', 'PbO2', 'SO2',
'SO3', 'Sb2O3', 'Sb2O5', 'SbO2', 'SiO', 'SiO2',
'Sn2O3', 'SnO', 'SnO2', 'SrO', 'Ta2O3', 'Ta2O5',
'TeO2', 'TeO3', 'Ti2O3', 'TiO', 'TiO2', 'V2O3',
'V2O5', 'VO2', 'VO6', 'WO3', 'Y2O3', 'Yb2O3', 'ZnO',
'ZrO2']
'''
Chemical_Compounds = ['Ag2O', 'Al2O3', 'As2O3', 'As2O5', 'B2O3', 'BaO',
'Bi2O3', 'CaO', 'CdO', 'Ce2O3', 'CeO2', 'Cl', 'Cs2O',
'Cu2O', 'CuO', 'Er2O3', 'F', 'Fe2O3', 'Fe3O4', 'FeO',
'Ga2O3', 'Gd2O3', 'GeO2', 'I', 'K2O', 'La2O3',
'Li2O', 'MgO', 'Mn2O3', 'Mn2O7', 'Mn3O4', 'MnO',
'MnO2', 'Mo2O3', 'Mo2O5', 'MoO', 'MoO2', 'MoO3', 'N',
'N2O5', 'NO2', 'Na2O', 'Nb2O3', 'Nb2O5', 'P2O3',
'P2O5', 'Pb3O4', 'PbO', 'PbO2', 'SO2', 'SO3',
'Sb2O3', 'Sb2O5', 'SbO2', 'SiO2', 'Sn2O3',
'SnO', 'SnO2', 'SrO', 'Ta2O3', 'Ta2O5',
'TeO3', 'Ti2O3', 'TiO', 'TiO2', 'V2O3', 'V2O5',
'VO2', 'VO6', 'WO3', 'Y2O3', 'Yb2O3', 'ZnO', 'ZrO2']
| 62.243243
| 79
| 0.329136
| 211
| 2,303
| 3.578199
| 0.616114
| 0.045033
| 0.055629
| 0.068874
| 0.815894
| 0.815894
| 0.815894
| 0.739073
| 0.739073
| 0.739073
| 0
| 0.132364
| 0.402953
| 2,303
| 36
| 80
| 63.972222
| 0.416727
| 0.018237
| 0
| 0
| 0
| 0
| 0.284397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
339a3fd2d184b366cd1b87709b56c392e919764e
| 6,484
|
py
|
Python
|
tamcolors/tests/tam_tools_tests/tam_key_manager_tests.py
|
cmcmarrow/tamcolors
|
65a5f2455bbe35a739b98d14af158c3df7feb786
|
[
"Apache-2.0"
] | 29
|
2020-07-17T23:46:17.000Z
|
2022-02-06T05:36:44.000Z
|
tamcolors/tests/tam_tools_tests/tam_key_manager_tests.py
|
sudo-nikhil/tamcolors
|
65a5f2455bbe35a739b98d14af158c3df7feb786
|
[
"Apache-2.0"
] | 42
|
2020-07-25T19:39:52.000Z
|
2021-02-24T01:19:58.000Z
|
tamcolors/tests/tam_tools_tests/tam_key_manager_tests.py
|
sudo-nikhil/tamcolors
|
65a5f2455bbe35a739b98d14af158c3df7feb786
|
[
"Apache-2.0"
] | 8
|
2020-07-18T23:02:48.000Z
|
2020-12-30T04:07:35.000Z
|
# built in libraries
import unittest.mock
# tamcolors libraries
from tamcolors import tam_tools
class TAMKeyManagerTests(unittest.TestCase):
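"""Tests for tam_tools.tam_key_manager.TAMKeyManager."""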
def test_init_key_manger(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
self.assertIsInstance(key_manger, tam_tools.tam_key_manager.TAMKeyManager)
def test_iter(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("a", "NORMAL"), ("B", "NORMAL")))
count = None
for count, key in enumerate(key_manger):
self.assertIsInstance(key, tuple)
self.assertEqual(count, 1)
def test_update(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("a", "NORMAL"), ("B", "NORMAL")))
self.assertEqual(key_manger.get_raw_user_input(), (("a", "NORMAL"), ("B", "NORMAL")))
def test_update_2(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("!", "NORMAL"),))
self.assertEqual(key_manger.get_raw_user_input(), (("!", "NORMAL"),))
def test_get_key_state(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
self.assertFalse(key_manger.get_key_state("A"))
def test_get_key_state_2(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("a", "NORMAL"), ("B", "NORMAL")))
self.assertFalse(key_manger.get_key_state("A"))
self.assertTrue(key_manger.get_key_state("a"))
self.assertTrue(key_manger.get_key_state("B"))
def test_silent_key_state(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
self.assertFalse(key_manger.silent_key_state("A"))
self.assertFalse(key_manger.silent_key_state("A"))
def test_silent_key_state_2(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("a", "NORMAL"), ("B", "NORMAL")))
self.assertTrue(key_manger.silent_key_state("B"))
self.assertFalse(key_manger.silent_key_state("B"))
def test_silent_key_state_3(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("A", "NORMAL"), ("C", "NORMAL")))
self.assertTrue(key_manger.silent_key_state("C"))
self.assertFalse(key_manger.silent_key_state("C"))
self.assertTrue(key_manger.silent_key_state("A"))
self.assertFalse(key_manger.silent_key_state("A"))
self.assertFalse(key_manger.silent_key_state("B"))
self.assertFalse(key_manger.silent_key_state("B"))
key_manger.update((("a", "NORMAL"), ("B", "NORMAL")))
self.assertTrue(key_manger.silent_key_state("B"))
self.assertFalse(key_manger.silent_key_state("B"))
def test_get_user_input(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
self.assertIs(key_manger.get_user_input(), None)
self.assertIs(key_manger.get_user_input(), None)
def test_get_user_input_2(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("A", "NORMAL"), ("C", "NORMAL")))
self.assertEqual(key_manger.get_user_input(), ("A", "NORMAL"))
self.assertEqual(key_manger.get_user_input(), ("C", "NORMAL"))
self.assertIs(key_manger.get_user_input(), None)
def test_get_user_input_3(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("X", "NORMAL"),))
self.assertEqual(key_manger.get_user_input(), ("X", "NORMAL"))
self.assertIs(key_manger.get_user_input(), None)
key_manger.update((("A", "NORMAL"), ("C", "NORMAL")))
self.assertEqual(key_manger.get_user_input(), ("A", "NORMAL"))
self.assertEqual(key_manger.get_user_input(), ("C", "NORMAL"))
self.assertIs(key_manger.get_user_input(), None)
def test_get_raw_user_input(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
self.assertEqual(list(key_manger.get_raw_user_input()), [])
def test_get_raw_user_input_2(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("A", "NORMAL"), ("C", "NORMAL")))
self.assertEqual(list(key_manger.get_raw_user_input()), [("A", "NORMAL"), ("C", "NORMAL")])
def test_get_raw_user_input_3(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_manger.update((("A", "NORMAL"), ("C", "NORMAL")))
self.assertEqual(list(key_manger.get_raw_user_input()), [("A", "NORMAL"), ("C", "NORMAL")])
key_manger.update((("4", "NORMAL"), ("1", "NORMAL")))
self.assertEqual(list(key_manger.get_raw_user_input()), [("4", "NORMAL"), ("1", "NORMAL")])
def test_get_user_input_generator(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_generator = key_manger.get_user_input_generator()
self.assertRaises(StopIteration, next, key_generator)
def test_get_user_input_generator_2(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_generator = key_manger.get_user_input_generator()
self.assertRaises(StopIteration, next, key_generator)
key_manger.update((("A", "NORMAL"), ("C", "NORMAL")))
key_generator = key_manger.get_user_input_generator()
self.assertEqual(next(key_generator), ("A", "NORMAL"))
self.assertEqual(next(key_generator), ("C", "NORMAL"))
self.assertRaises(StopIteration, next, key_generator)
def test_get_user_input_generator_3(self):
key_manger = tam_tools.tam_key_manager.TAMKeyManager()
key_generator = key_manger.get_user_input_generator()
self.assertRaises(StopIteration, next, key_generator)
key_manger.update((("B", "NORMAL"), ("C", "NORMAL")))
key_generator = key_manger.get_user_input_generator()
self.assertEqual(next(key_generator), ("B", "NORMAL"))
self.assertEqual(next(key_generator), ("C", "NORMAL"))
self.assertRaises(StopIteration, next, key_generator)
key_manger.update((("B", "NORMAL"), ("C", "NORMAL"), ("5", "NORMAL")))
key_generator = key_manger.get_user_input_generator()
self.assertEqual(next(key_generator), ("B", "NORMAL"))
self.assertEqual(next(key_generator), ("C", "NORMAL"))
self.assertEqual(next(key_generator), ("5", "NORMAL"))
self.assertRaises(StopIteration, next, key_generator)
| 40.525
| 99
| 0.669957
| 828
| 6,484
| 4.88285
| 0.065217
| 0.166955
| 0.07717
| 0.079891
| 0.917141
| 0.903042
| 0.889439
| 0.86322
| 0.811526
| 0.771457
| 0
| 0.003185
| 0.176743
| 6,484
| 159
| 100
| 40.779874
| 0.754215
| 0.005861
| 0
| 0.612613
| 0
| 0
| 0.058979
| 0
| 0
| 0
| 0
| 0
| 0.432432
| 1
| 0.162162
| false
| 0
| 0.018018
| 0
| 0.189189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
339b6f9e5fdc45f6c2b024fbc8a6a74fd027c509
| 74
|
py
|
Python
|
cop_server/imports.py
|
gaming32/CreatorsOfPlay
|
f22b6c7b637d5696ce03fbf24012430e0dd1be29
|
[
"MIT"
] | null | null | null |
cop_server/imports.py
|
gaming32/CreatorsOfPlay
|
f22b6c7b637d5696ce03fbf24012430e0dd1be29
|
[
"MIT"
] | null | null | null |
cop_server/imports.py
|
gaming32/CreatorsOfPlay
|
f22b6c7b637d5696ce03fbf24012430e0dd1be29
|
[
"MIT"
] | null | null | null |
from cop_server.client import Client
from cop_server.server import Server
| 24.666667
| 36
| 0.864865
| 12
| 74
| 5.166667
| 0.416667
| 0.225806
| 0.419355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 2
| 37
| 37
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d50aaecd7d94dd563928190ee0fc9cc41f6f42c0
| 10,794
|
py
|
Python
|
tests/test_homewizard_energy.py
|
DCSBL/python-homewizard-energy
|
e3b08f8f5327e41f1cc2a0389b527ae9e94da80e
|
[
"Apache-2.0"
] | 1
|
2022-02-28T14:06:08.000Z
|
2022-02-28T14:06:08.000Z
|
tests/test_homewizard_energy.py
|
DCSBL/python-homewizard-energy
|
e3b08f8f5327e41f1cc2a0389b527ae9e94da80e
|
[
"Apache-2.0"
] | 32
|
2022-02-11T04:38:57.000Z
|
2022-03-30T04:43:11.000Z
|
tests/test_homewizard_energy.py
|
DCSBL/python-homewizard-energy
|
e3b08f8f5327e41f1cc2a0389b527ae9e94da80e
|
[
"Apache-2.0"
] | 1
|
2022-03-14T08:44:08.000Z
|
2022-03-14T08:44:08.000Z
|
"""Test for HomeWizard Energy."""
from unittest.mock import patch
import aiohttp
import pytest
from homewizard_energy import HomeWizardEnergy
from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError
from . import load_fixtures
@pytest.mark.asyncio
async def test_request_returns_json(aresponses):
"""Test JSON response is handled correctly."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
status=200,
headers={"Content-Type": "application/json"},
text='{"status": "ok"}',
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
return_value = await api.request("api")
assert isinstance(return_value, dict)
assert return_value["status"] == "ok"
await api.close()
@pytest.mark.asyncio
async def test_request_internal_session(aresponses):
"""Test session is closed when created internally."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
status=200,
headers={"Content-Type": "application/json"},
text='{"status": "ok"}',
),
)
api = HomeWizardEnergy("example.com")
assert await api.request("api")
await api.close()
@pytest.mark.asyncio
async def test_request_returns_txt(aresponses):
"""Test request returns raw text when non-json."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
status=200,
headers={"Content-Type": "application/not-json"},
text='{"status": "ok"}',
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
return_value = await api.request("api")
assert isinstance(return_value, str)
assert return_value == '{"status": "ok"}'
await api.close()
@pytest.mark.asyncio
async def test_request_detects_403(aresponses):
"""Test request detects disabled API."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
status=403,
headers={"Content-Type": "application/json"},
text='{"status": "ok"}',
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
with pytest.raises(DisabledError):
await api.request("api")
await api.close()
@pytest.mark.asyncio
async def test_request_detects_non_200(aresponses):
"""Test detects non-ok response."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
status=500,
headers={"Content-Type": "application/json"},
text='{"status": "ok"}',
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
with pytest.raises(RequestError):
await api.request("api")
await api.close()
@pytest.mark.asyncio
async def test_request_detects_clienterror():
"""Test other clienterror."""
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
with patch.object(
session, "request", side_effect=aiohttp.ClientError
), pytest.raises(RequestError):
await api.request("api")
await api.close()
@pytest.mark.asyncio
async def test_get_device_object(aresponses):
"""Test device object is fetched and sets detected values."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
text=load_fixtures("device.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
device = await api.device()
assert device
assert device.product_type == "HWE-P1"
# pylint: disable=protected-access
assert api._detected_api_version == "v1"
assert api._detected_product_type == "HWE-P1"
await api.close()
@pytest.mark.asyncio
async def test_get_device_object_detects_invalid_api(aresponses):
"""Test raises error when invalid API is used."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
text=load_fixtures("device_invalid_api.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
with pytest.raises(UnsupportedError):
await api.device()
await api.close()
@pytest.mark.asyncio
async def test_get_data_object(aresponses):
"""Test fetches data object and device object when unknown."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
text=load_fixtures("device.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
aresponses.add(
"example.com",
"/api/v1/data",
"GET",
aresponses.Response(
text=load_fixtures("data_p1.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
data = await api.data()
assert data
assert data.smr_version == 50
await api.close()
@pytest.mark.asyncio
async def test_get_data_object_with_known_device(aresponses):
"""Test fetches data object."""
aresponses.add(
"example.com",
"/api/v1/data",
"GET",
aresponses.Response(
text=load_fixtures("data_p1.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
# pylint: disable=protected-access
api._detected_api_version = "v1"
data = await api.data()
assert data
assert data.smr_version == 50
await api.close()
@pytest.mark.asyncio
async def test_get_state_object(aresponses):
"""Test fetches state object and device object when unknown."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
text=load_fixtures("device_energysocket.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
aresponses.add(
"example.com",
"/api/v1/state",
"GET",
aresponses.Response(
text=load_fixtures("state.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
state = await api.state()
assert state
assert not state.power_on
await api.close()
@pytest.mark.asyncio
async def test_get_state_object_with_known_device(aresponses):
"""Test fetches state object."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
text=load_fixtures("device_energysocket.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
aresponses.add(
"example.com",
"/api/v1/state",
"GET",
aresponses.Response(
text=load_fixtures("state.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
# pylint: disable=protected-access
api._detected_api_version = "v1"
api._detected_product_type = "HWE-SKT"
state = await api.state()
assert state
assert not state.power_on
await api.close()
@pytest.mark.asyncio
async def test_get_state_object_returns_null_not_supported(aresponses):
"""Test detects device has no support for state."""
aresponses.add(
"example.com",
"/api",
"GET",
aresponses.Response(
text=load_fixtures("device.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
aresponses.add(
"example.com",
"/api/v1/data",
"GET",
aresponses.Response(
text=load_fixtures("state.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
state = await api.state()
assert not state
await api.close()
@pytest.mark.asyncio
async def test_state_set(aresponses):
"""Test state set."""
aresponses.add(
"example.com",
"/api/v1/state",
"PUT",
aresponses.Response(
text=load_fixtures("state.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
state = await api.state_set(power_on=False, switch_lock=False, brightness=255)
assert state
await api.close()
@pytest.mark.asyncio
async def test_state_set_detects_no_statechange(aresponses):
"""Test state set does not send request when nothing is changed."""
aresponses.add(
"example.com",
"/api/v1/state",
"PUT",
aresponses.Response(
text=load_fixtures("state.json"),
status=200,
headers={"Content-Type": "application/json; charset=utf-8"},
),
)
async with aiohttp.ClientSession() as session:
api = HomeWizardEnergy("example.com", clientsession=session)
state = await api.state_set()
assert not state
| 26.651852
| 86
| 0.594775
| 1,120
| 10,794
| 5.63125
| 0.109821
| 0.052323
| 0.057079
| 0.065641
| 0.825274
| 0.804186
| 0.802283
| 0.790867
| 0.78294
| 0.766133
| 0
| 0.012088
| 0.2796
| 10,794
| 404
| 87
| 26.717822
| 0.798997
| 0.011766
| 0
| 0.788079
| 0
| 0
| 0.158893
| 0.007122
| 0
| 0
| 0
| 0
| 0.066225
| 1
| 0
| false
| 0
| 0.019868
| 0
| 0.019868
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1d181ebfec1ee3b7d4c8772d47f7234235aece2f
| 47
|
py
|
Python
|
p2016_05_28_python_path_find/os.py
|
zhyq0826/blog-code
|
4369d653dea4a7a054dc796d14faea727973258f
|
[
"MIT"
] | 1
|
2018-07-07T14:35:55.000Z
|
2018-07-07T14:35:55.000Z
|
p2016_05_28_python_path_find/os.py
|
zhyq0826/blog-code
|
4369d653dea4a7a054dc796d14faea727973258f
|
[
"MIT"
] | null | null | null |
p2016_05_28_python_path_find/os.py
|
zhyq0826/blog-code
|
4369d653dea4a7a054dc796d14faea727973258f
|
[
"MIT"
] | null | null | null |
print('I am os in p2015_05_28_python_path_find')
| 47
| 47
| 0.851064
| 11
| 47
| 3.181818
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 0.106383
| 47
| 1
| 47
| 47
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0.8125
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
1d8037fdb166a79d0c82e91c6e3072aec29c96c4
| 11,848
|
py
|
Python
|
geomagio/processing/obsrio.py
|
usgs/geomag-algorithms
|
a83a0e36bed9307828e37b9130c25dbc26dd1bc9
|
[
"CC0-1.0"
] | 49
|
2015-10-06T17:57:20.000Z
|
2022-01-12T18:40:17.000Z
|
geomagio/processing/obsrio.py
|
usgs/geomag-algorithms
|
a83a0e36bed9307828e37b9130c25dbc26dd1bc9
|
[
"CC0-1.0"
] | 229
|
2015-01-26T20:10:36.000Z
|
2022-03-12T00:46:33.000Z
|
geomagio/processing/obsrio.py
|
usgs/geomag-algorithms
|
a83a0e36bed9307828e37b9130c25dbc26dd1bc9
|
[
"CC0-1.0"
] | 44
|
2015-03-03T16:18:18.000Z
|
2021-11-06T17:07:38.000Z
|
from typing import List, Optional
import typer
from ..algorithm import Algorithm, FilterAlgorithm
from ..Controller import (
Controller,
get_realtime_interval,
)
from ..TimeseriesFactory import TimeseriesFactory
from .factory import get_edge_factory, get_miniseed_factory
def main():
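"""Console entry point: run obsrio_filter as a typer CLI."""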
typer.run(obsrio_filter)
def obsrio_filter(
interval: str,
observatory: str,
input_factory: Optional[str] = None,
host: str = "127.0.0.1",
port: int = 2061,
output_factory: Optional[str] = None,
output_port: int = typer.Option(
2061, help="Port where output factory writes data."
),
output_read_port: int = typer.Option(
2061, help="Port where output factory reads data"
),
realtime_interval: int = 600,
update_limit: int = 10,
):
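"""Dispatch filtering for the requested interval: realtime, hour, or day."""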
if interval == "realtime":
filter_realtime(
observatory=observatory,
input_factory=input_factory,
host=host,
port=port,
output_factory=output_factory,
output_port=output_port,
output_read_port=output_read_port,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
elif interval in ["hour", "day"]:
input_factory = get_edge_factory(host=host, port=port)
output_factory = get_miniseed_factory(
host=host, port=output_read_port, write_port=output_port
)
if interval == "hour":
obsrio_hour(
observatory=observatory,
input_factory=input_factory,
output_factory=output_factory,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
elif interval == "day":
obsrio_day(
observatory=observatory,
input_factory=input_factory,
output_factory=output_factory,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
else:
raise ValueError("Invalid interval")
def filter_realtime(
observatory: str,
input_factory: Optional[str] = None,
host: str = "127.0.0.1",
port: int = 2061,
output_factory: Optional[str] = None,
output_port: int = typer.Option(
2061, help="Port where output factory writes data."
),
output_read_port: int = typer.Option(
2061, help="Port where output factory reads data"
),
realtime_interval: int = 600,
update_limit: int = 10,
):
"""Filter 10Hz miniseed, 1 second, one minute, and temperature data.
Defaults are set for realtime processing; can also be used to update legacy data."""
if input_factory == "miniseed":
input_factory = get_miniseed_factory(host=host, port=port)
elif input_factory == "edge":
input_factory = get_edge_factory(host=host, port=port)
if output_factory == "miniseed":
output_factory = get_miniseed_factory(
host=host, port=output_read_port, write_port=output_port
)
elif output_factory == "edge":
output_factory = get_edge_factory(
host=host, port=output_read_port, write_port=output_port
)
obsrio_tenhertz(
observatory=observatory,
input_factory=input_factory,
output_factory=output_factory,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
obsrio_second(
observatory=observatory,
input_factory=input_factory,
output_factory=output_factory,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
obsrio_minute(
observatory=observatory,
input_factory=input_factory,
output_factory=output_factory,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
obsrio_temperatures(
observatory=observatory,
input_factory=input_factory,
output_factory=output_factory,
realtime_interval=realtime_interval,
update_limit=update_limit,
)
def obsrio_day(
observatory: str,
input_factory: Optional[TimeseriesFactory] = None,
output_factory: Optional[TimeseriesFactory] = None,
realtime_interval: int = 86400,
update_limit: int = 7,
):
"""Filter 1 second edge H,E,Z,F to 1 day miniseed U,V,W,F."""
starttime, endtime = get_realtime_interval(realtime_interval)
controller = Controller(
inputFactory=input_factory or get_edge_factory(),
inputInterval="minute",
outputFactory=output_factory or get_miniseed_factory(),
outputInterval="day",
)
renames = {"H": "U", "E": "V", "Z": "W", "F": "F"}
for input_channel in renames.keys():
output_channel = renames[input_channel]
controller.run_as_update(
algorithm=FilterAlgorithm(
input_sample_period=60.0,
output_sample_period=86400.0,
inchannels=(input_channel,),
outchannels=(output_channel,),
),
observatory=(observatory,),
output_observatory=(observatory,),
starttime=starttime,
endtime=endtime,
input_channels=(input_channel,),
output_channels=(output_channel,),
realtime=realtime_interval,
rename_output_channel=((input_channel, output_channel),),
update_limit=update_limit,
)
def obsrio_hour(
observatory: str,
input_factory: Optional[TimeseriesFactory] = None,
output_factory: Optional[TimeseriesFactory] = None,
realtime_interval: int = 600,
update_limit: int = 10,
):
"""Filter 1 minute edge H,E,Z,F to 1 hour miniseed U,V,W,F."""
starttime, endtime = get_realtime_interval(realtime_interval)
controller = Controller(
inputFactory=input_factory or get_edge_factory(),
inputInterval="minute",
outputFactory=output_factory or get_miniseed_factory(),
outputInterval="hour",
)
renames = {"H": "U", "E": "V", "Z": "W", "F": "F"}
for input_channel in renames.keys():
output_channel = renames[input_channel]
controller.run_as_update(
algorithm=FilterAlgorithm(
input_sample_period=60.0,
output_sample_period=3600.0,
inchannels=(input_channel,),
outchannels=(output_channel,),
),
observatory=(observatory,),
output_observatory=(observatory,),
starttime=starttime,
endtime=endtime,
input_channels=(input_channel,),
output_channels=(output_channel,),
realtime=realtime_interval,
rename_output_channel=((input_channel, output_channel),),
update_limit=update_limit,
)
def obsrio_minute(
observatory: str,
channels: List[str] = ["H", "E", "Z", "F"],
input_factory: Optional[TimeseriesFactory] = None,
output_factory: Optional[TimeseriesFactory] = None,
realtime_interval: int = 600,
update_limit: int = 10,
):
"""Filter 1Hz legacy channels to 1 minute legacy.
For H,E,Z,F: should be called after obsrio_second() and obsrio_tenhertz(),
which populate 1Hz legacy H,E,Z,F.
"""
starttime, endtime = get_realtime_interval(realtime_interval)
controller = Controller(
inputFactory=input_factory or get_edge_factory(),
inputInterval="second",
outputFactory=output_factory or get_edge_factory(),
outputInterval="minute",
)
for channel in channels:
controller.run_as_update(
algorithm=FilterAlgorithm(
input_sample_period=1,
output_sample_period=60,
inchannels=(channel,),
outchannels=(channel,),
),
observatory=(observatory,),
output_observatory=(observatory,),
starttime=starttime,
endtime=endtime,
input_channels=(channel,),
output_channels=(channel,),
realtime=realtime_interval,
update_limit=update_limit,
)
def obsrio_second(
observatory: str,
input_factory: Optional[TimeseriesFactory] = None,
output_factory: Optional[TimeseriesFactory] = None,
realtime_interval: int = 600,
update_limit: int = 10,
):
"""Copy 1Hz miniseed F to 1Hz legacy F."""
starttime, endtime = get_realtime_interval(realtime_interval)
controller = Controller(
algorithm=Algorithm(inchannels=("F",), outchannels=("F",)),
inputFactory=input_factory or get_miniseed_factory(),
outputFactory=output_factory or get_edge_factory(),
)
controller.run_as_update(
observatory=(observatory,),
output_observatory=(observatory,),
starttime=starttime,
endtime=endtime,
input_channels=("F",),
output_channels=("F",),
realtime=realtime_interval,
update_limit=update_limit,
)
def obsrio_temperatures(
observatory: str,
input_factory: Optional[TimeseriesFactory] = None,
output_factory: Optional[TimeseriesFactory] = None,
realtime_interval: int = 600,
update_limit: int = 10,
):
"""Filter temperatures 1Hz miniseed (LK1-4) to 1 minute legacy (UK1-4)."""
starttime, endtime = get_realtime_interval(realtime_interval)
controller = Controller(
inputFactory=input_factory or get_miniseed_factory(),
inputInterval="second",
outputFactory=output_factory or get_edge_factory(),
outputInterval="minute",
)
renames = {"LK1": "UK1", "LK2": "UK2", "LK3": "UK3", "LK4": "UK4"}
for input_channel in renames.keys():
output_channel = renames[input_channel]
controller.run_as_update(
algorithm=FilterAlgorithm(
input_sample_period=1,
output_sample_period=60,
inchannels=(input_channel,),
outchannels=(output_channel,),
),
observatory=(observatory,),
output_observatory=(observatory,),
starttime=starttime,
endtime=endtime,
input_channels=(input_channel,),
output_channels=(output_channel,),
realtime=realtime_interval,
rename_output_channel=((input_channel, output_channel),),
update_limit=update_limit,
)
def obsrio_tenhertz(
observatory: str,
input_factory: Optional[TimeseriesFactory] = None,
output_factory: Optional[TimeseriesFactory] = None,
realtime_interval: int = 600,
update_limit: int = 10,
):
"""Filter 10Hz miniseed U,V,W to 1Hz legacy H,E,Z."""
starttime, endtime = get_realtime_interval(realtime_interval)
controller = Controller(
inputFactory=input_factory
or get_miniseed_factory(convert_channels=("U", "V", "W")),
inputInterval="tenhertz",
outputFactory=output_factory or get_edge_factory(),
outputInterval="second",
)
renames = {"U": "H", "V": "E", "W": "Z"}
for input_channel in renames.keys():
output_channel = renames[input_channel]
controller.run_as_update(
algorithm=FilterAlgorithm(
input_sample_period=0.1,
output_sample_period=1,
inchannels=(input_channel,),
outchannels=(output_channel,),
),
observatory=(observatory,),
output_observatory=(observatory,),
starttime=starttime,
endtime=endtime,
input_channels=(input_channel,),
output_channels=(output_channel,),
realtime=realtime_interval,
rename_output_channel=((input_channel, output_channel),),
update_limit=update_limit,
)
| 34.542274
| 90
| 0.63209
| 1,207
| 11,848
| 5.935377
| 0.096935
| 0.091569
| 0.036293
| 0.058068
| 0.822446
| 0.818537
| 0.813093
| 0.785874
| 0.776801
| 0.749721
| 0
| 0.01529
| 0.271354
| 11,848
| 342
| 91
| 34.643275
| 0.814549
| 0.048447
| 0
| 0.71521
| 0
| 0
| 0.030479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029126
| false
| 0
| 0.019417
| 0
| 0.048544
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d56d0bd873cc639742a05c7b707c617ddf365a09
| 58
|
py
|
Python
|
lw_visutils/nn/__init__.py
|
wolterlw/lw_visutils
|
1487a235c8c0cd71b42758ccb13760a45689889b
|
[
"MIT"
] | null | null | null |
lw_visutils/nn/__init__.py
|
wolterlw/lw_visutils
|
1487a235c8c0cd71b42758ccb13760a45689889b
|
[
"MIT"
] | null | null | null |
lw_visutils/nn/__init__.py
|
wolterlw/lw_visutils
|
1487a235c8c0cd71b42758ccb13760a45689889b
|
[
"MIT"
] | null | null | null |
import lw_visutils.nn.hourglass
import lw_visutils.nn.unet
| 29
| 31
| 0.87931
| 10
| 58
| 4.9
| 0.6
| 0.326531
| 0.653061
| 0.734694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051724
| 58
| 2
| 32
| 29
| 0.890909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d596452e81ac55ab1d6a849da60a82b57ddb4167
| 2,389
|
py
|
Python
|
walletbackend/wallet/models.py
|
Fajar2020/MiniEWallet
|
bd843ab0a957f999dd557dd1f4e27a5d9d08b828
|
[
"MIT"
] | null | null | null |
walletbackend/wallet/models.py
|
Fajar2020/MiniEWallet
|
bd843ab0a957f999dd557dd1f4e27a5d9d08b828
|
[
"MIT"
] | 3
|
2021-10-06T19:43:07.000Z
|
2022-02-27T07:55:40.000Z
|
walletbackend/wallet/models.py
|
Fajar2020/MiniEWallet
|
bd843ab0a957f999dd557dd1f4e27a5d9d08b828
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class TypeActivity(models.Model):
title = models.CharField(max_length=32, blank=False, null=False)
def __str__(self):
return self.title
class Bank(models.Model):
code = models.CharField(max_length=150, blank=False, null=False)
def __str__(self):
return self.code
class BankBalance(models.Model):
bank = models.ForeignKey(Bank, on_delete=models.CASCADE)
balance = models.IntegerField(default=0)
balance_achieve = models.IntegerField(default=0)
enable = models.BooleanField(default=False)
class BankBalanceHistory(models.Model):
bank_balance = models.ForeignKey(BankBalance, on_delete=models.CASCADE)
balance_before = models.IntegerField(default=0)
balance_after = models.IntegerField(default=0)
activity = models.CharField(max_length=32, blank=False, null=False)
typeActivity = models.ForeignKey(TypeActivity, on_delete=models.CASCADE)
ip = models.CharField(max_length=32, blank=False, null=False)
location = models.CharField(max_length=250, blank=False, null=False)
user_agent = models.CharField(max_length=150, blank=False, null=False)
author = models.CharField(max_length=150, blank=False, null=False)
class UserBalance(models.Model):
bank = models.ForeignKey(Bank, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
balance = models.IntegerField(default=0)
balance_achieve = models.IntegerField(default=0)
class UserBalanceHistory(models.Model):
user_balance = models.ForeignKey(UserBalance, on_delete=models.CASCADE)
balance_before = models.IntegerField(default=0)
balance_after = models.IntegerField(default=0)
activity = models.CharField(max_length=32, blank=False, null=False)
typeActivity = models.ForeignKey(TypeActivity, on_delete=models.CASCADE)
ip = models.CharField(max_length=32, blank=False, null=False)
location = models.CharField(max_length=250, blank=False, null=False)
user_agent = models.CharField(max_length=150, blank=False, null=False)
author = models.CharField(max_length=150, blank=False, null=False)
send_to = models.ForeignKey(User, on_delete=models.CASCADE)
send_to_bank = models.ForeignKey(Bank, on_delete=models.CASCADE)
confirm = models.BooleanField(default=False)
| 43.436364
| 76
| 0.757639
| 307
| 2,389
| 5.758958
| 0.179153
| 0.10181
| 0.122172
| 0.162896
| 0.766968
| 0.766968
| 0.766968
| 0.732466
| 0.707014
| 0.632919
| 0
| 0.01885
| 0.133947
| 2,389
| 55
| 77
| 43.436364
| 0.835669
| 0.010046
| 0
| 0.55814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.046512
| 0.046512
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
d5ab5fa3846077fa6030afca348b4337c50426a4
| 13,463
|
py
|
Python
|
MISSIONS/air_fight/environment/renderserver/server.py
|
Harold0/hmp
|
4745e1d3e56c7f08947c839526e6827daa3e6048
|
[
"MIT"
] | null | null | null |
MISSIONS/air_fight/environment/renderserver/server.py
|
Harold0/hmp
|
4745e1d3e56c7f08947c839526e6827daa3e6048
|
[
"MIT"
] | null | null | null |
MISSIONS/air_fight/environment/renderserver/server.py
|
Harold0/hmp
|
4745e1d3e56c7f08947c839526e6827daa3e6048
|
[
"MIT"
] | null | null | null |
__pyarmor__(__name__, __file__, b'\xd1\x50\x8c\x64\x26\x42\xd6\x01\x9a\xaf\x11\xbc\x6b\x8e\x6c\x7f\x32\x6f\x43\x8a\x03\xa3\xba\xae\x2a\x34\x42\x60\x5c\xad\x11\x56\xd4\x14\x69\xe0\xcb\x4d\xca\xee\x53\x0d\x25\xa8\x08\x48\x72\x85\x66\xd6\xbc\x92\xa1\xf0\xb2\xa5\xc2\x0a\x42\x7b\xa3\x34\xd1\x69\xe2\x61\x12\x7d\xe4\xae\xbb\xdf\x54\xea\x6e\x8a\x1c\xd3\x79\x24\xbc\xa5\x39\x0a\xca\xdb\x25\xb8\x5e\xf2\x1a\xc4\x9d\x8a\x57\x03\x78\x14\xeb\x45\xd1\xe5\x62\xd1\xf8\xf9\x1b\x4b\x3d\xd7\xa0\x96\x40\xb3\xa4\x1d\x51\xfa\xfb\x81\x86\x3a\xb0\xf9\x9b\x47\xff\x56\x3b\x33\x24\xcf\x30\xdf\xf2\x39\x3e\xc9\x43\x45\x65\x10\x8b\xba\x16\xd3\xee\xaa\x34\x23\xde\x7f\xe3\x93\x56\xaa\xac\xea\x05\x3c\x92\xe1\x4a\x36\x68\x72\x75\x50\x14\x8a\x63\xd7\x97\xe6\x45\xa3\x42\xe5\x67\x7b\x01\x43\xbf\xe1\x6d\x8b\xaf\xa4\x5d\xc6\x73\x12\xb9\x6b\xda\x94\x43\x6a\x43\xf5\xf8\xee\x1f\xc6\x12\xe5\x47\xd4\x84\x07\xd3\x16\x05\x4f\x67\x6e\x1d\x2a\x2b\xfd\xf2\xbe\x3d\x7f\x82\xcb\x36\x41\x64\x21\x7a\xa2\xaa\xf2\x94\x17\x8c\xb2\x01\x3d\xdf\x7d\x13\x14\x00\x2d\x71\x12\xe3\x91\xf7\xf6\x97\xda\x53\x97\xb9\x1d\xd6\xad\x60\x89\x40\xcb\x02\xff\x87\x3b\xba\xfb\xf2\x7b\x78\x3c\x55\x81\xb5\x2b\x70\x4d\x1c\x9c\x05\x67\xf1\x54\x45\x06\xee\xd5\xb7\x6f\x39\x96\xcd\xae\x21\xe9\x95\xdc\xef\x12\x80\xfc\xa5\x87\xb2\xe1\xd8\x3b\x5f\xa0\xa4\x28\x06\x5a\xc1\x39\x24\x49\x31\x45\xde\xf4\xc4\xd2\x16\x96\x14\x30\xc3\xef\x6a\xa2\xf3\xf5\xa1\x23\x59\xa1\x42\xd3\x51\x0a\xb3\x30\xd6\x82\x2b\x94\xa6\xb9\xff\x47\xfc\xb8\x9c\x57\x09\x30\x58\x0e\x12\x32\xff\xcb\x0d\x0e\xf5\x5f\x76\xac\x42\xf2\x6f\xe3\xa6\xd8\x67\x9e\x25\x57\x19\xe6\xe0\xcb\x73\x54\xdb\x3c\x16\x4a\x3d\x88\xa2\x8b\xd3\xc1\xe8\xce\x2b\xcf\x6b\x37\x43\x2c\x44\x41\x13\xca\x29\x7a\x96\x9c\xf6\x10\x29\xe7\xaf\x7e\xed\x75\x82\x79\x00\x9a\x4f\x7e\x8a\xb6\x4b\x18\x70\x5e\xcd\xcf\x02\x43\x05\x39\x12\x3d\x18\x8d\x2e\x5a\x9e\x05\xfd\x58\x1b\x97\xbe\xc4\x7a\x49\x33\x0d\x1b\x0d\x98\xc0\x8f\x2c\x37\x9b\xdc\xf7\xa1\xb3\xcd\xe5\x2e\xd2\xc7\x35\x7c\x1c\xee\x4e\xc6\x9d\x1d\x93\xdd\x77\xaa\x10\xc9\x56\x02\x7f\xc8\x6c\x40\xef\xf4\x00\x29\xb9\xf7\x62\x1d\x59\xa8\x78\x83\x19\xdd\xde\x8d\x60\x77\x4c\x03\x56\x21\x8d\x46\x68\x79\x17\x7b\x80\x9c\x91\xe7\xd4\x00\x89\x63\x44\xc8\x35\xc0\xcf\x63\x1c\xeb\x6a\x18\x00\x83\x50\xa0\xa6\xab\x83\x79\x23\xb6\x50\x68\x27\x91\x3f\x68\xb8\x6a\xdd\x4b\x3c\x04\x03\x98\x6a\x6d\x9a\xac\xc0\x45\xa4\xff\x52\xc6\x5f\x06\x7b\x7c\x59\x83\x8d\xf3\xf1\x4e\x15\xad\x8f\x44\x95\xd4\xfb\x41\x48\x86\x18\x33\x2f\x1f\xe9\x2a\xa8\x78\xe6\x18\x42\xbb\xa0\x13\x9e\xec\x91\x19\xf1\x8c\xac\xa4\x8a\x8e\x43\xf3\x68\xb6\x31\x15\x49\xad\xf2\xb0\x8b\xb5\x59\xf6\x75\x32\x46\x84\xfb\x2c\x1d\x53\xad\xd6\xb3\x16\xa1\xfd\x7c\x40\x47\x34\xf5\xc9\x43\x1f\x58\xc9\x95\xed\x15\x71\x53\xf6\xa3\x42\x99\x80\x85\x1a\xdc\xe8\x92\xc4\x67\xd6\x01\x2b\xe0\xca\x6e\xaa\x0a\x7b\x75\x64\x35\x18\x50\x8c\x2d\x05\xfb\xb1\x9d\x8f\x40\x67\x9b\x91\x9f\x04\x22\x1f\x87\x84\x62\x0a\xfe\x22\x58\x48\xa9\xd2\x6d\x7e\xe0\x3a\x34\xdb\xf7\xb9\xaa\x75\xcd\xc8\x6d\x37\x9d\x84\xc5\x86\xe3\xb8\x16\xf9\x09\xc1\x82\x90\xe2\x55\xd7\x63\x2a\xb8\x4d\x94\xbc\x4d\xcf\x66\x2b\xbd\x65\x0c\x61\x85\x83\xb0\x8e\x0f\x50\x71\x60\xea\xcb\xf4\x53\x6b\x15\x06\xb1\x00\xbe\x02\xd8\x35\xa6\x06\xc6\x4f\xa6\x68\x7d\x3e\x90\x31\xc2\x56\xda\xaf\x54\x5b\x4a\x76\xde\x7e\x42\xc5\xf3\x01\xb3\x4c\xbe\xd8\xb1\x6a\xb3\x38\x70\xd5\x3d\x7a\x5a\x78\x86\x30\xc6\x87\x22\xc6\x21\x96\x87\xbe\xe7\x94\x3c\x7f\x6a\xb3\x73\x2e\xfc\xb9\xd1\xe2\xeb\x7c\xe1\x54\xbb\x22\x2f\x16\x52\xaa\xd5\xbd\xee\xf3\xc8\x01\xbd\x36\x36\x8a\x31\x40\xe8\x96\xc3\xfc\x22\xc7\x8a\xf7\x07\x5b\xd4\x03\x5c\xf1\xde\x71\x0a\x92\x00\xd1\x11\
x13\x9f\x6a\x2f\x95\xce\x2d\x99\x91\xa0\x2e\xee\x4d\x97\xea\x7a\x80\x90\xb0\x27\xcb\x09\x83\xda\x6f\x2d\x27\xb9\x6e\x79\xf5\xa4\xb2\xe4\xea\x3e\x5a\x98\x98\xd8\x42\x10\x31\x27\xaa\xa7\x72\xe8\x3e\x2a\x3c\x81\xd3\x84\x28\x18\x3c\xaa\x25\xb0\xc3\x69\x28\xc2\x2b\x85\xd4\x95\x7c\x07\x66\xa4\xf7\x67\x05\x6f\x4b\xd0\x0c\x12\xd0\x30\x06\x01\xfa\x4b\xb1\x5a\x1e\xeb\xae\x8a\x65\xf3\x89\x32\x2d\x4f\x37\xf7\x0c\xa0\x78\x62\xc0\xb6\x71\x01\x2f\x1e\x35\x84\x31\x59\xa6\xf5\x9b\xc5\x80\x4d\x1e\x16\x25\xd0\xe7\x73\x97\xf4\xf1\x81\x04\x46\x87\x9f\xb5\x4b\x6a\x94\x94\xe4\x22\x3b\x6c\x36\xe0\xeb\xff\x19\x45\x1c\xa8\x28\x20\x93\x59\x11\x18\x08\xfd\x2f\x57\xf8\x90\x08\x35\x84\x9c\x36\x2c\x97\x56\xe0\xd8\x9b\x6f\xfa\xe8\x4d\x05\x96\x29\xc1\x09\xbd\x85\x76\x68\x2b\x02\x17\xc6\xb8\xc0\xd0\xf1\x23\x59\x4e\x1f\x93\xae\x5f\xb3\x04\x1d\x31\x92\x5d\x51\x8e\x23\xec\xde\xa9\x96\x63\xde\x57\xb6\x9f\x5c\x06\x7a\xf8\x80\x3f\xef\x73\xbc\x62\x0c\x35\xe1\x99\x74\x39\x81\xc9\xe8\xae\x53\xa2\x04\x32\xa2\xfa\x62\x69\xae\x39\xcf\xb5\xe0\xc4\xd3\xce\x61\x3c\x64\x0e\x37\x32\x70\xf6\xce\x71\xdd\xd9\x61\x6e\x14\xac\x14\x6a\xd6\x92\x4a\x50\xa4\xae\xf5\x38\x61\x0c\x69\x52\xa6\xf7\xe5\xa9\x7e\x06\x5e\x3b\x18\xb3\x36\x98\x9f\x5c\xcf\x8b\xaa\x5d\x4b\x8f\xbe\x85\xd5\xf6\xa6\x58\x0a\xbe\xe7\xf6\xd2\xa2\x41\xf8\x9d\xdb\x23\x2d\x5b\xbe\xf0\x65\xf3\x3f\x13\x75\x79\xa3\x45\x80\x98\xf3\x7c\x06\x34\x4b\x97\x8e\x08\xd6\x58\x19\x44\x86\x18\x39\x4c\x37\xfa\x9d\x62\x44\x47\x0d\xb7\xde\x36\x6b\xdd\x51\x09\xdf\x4b\x65\xad\x37\x64\x4e\x79\x86\x44\xf3\x51\x70\x3d\xe7\x71\xd1\x27\xb6\x38\x9e\x02\x92\xd9\xe0\x7e\xa2\xd0\x0f\x8e\x4b\x41\x7e\x47\xa5\x7d\x1c\xae\x3c\xfc\xb3\x88\x03\x98\x6d\x51\xa9\x8f\x4a\xee\x3e\x94\x8b\xe6\xa7\x8e\xdf\x95\xba\xb2\xaf\x70\x80\x40\x01\xb9\xdd\x8d\x5d\x70\xb5\x16\xfd\xaa\x18\xc4\x20\xc6\x8d\x1d\x14\xfd\xaf\x42\xef\x22\x61\x53\xe6\xa5\x70\xe6\x18\x8a\x4d\xf9\x14\x8a\x53\xdc\xf9\xb7\xa4\xf0\x29\xdd\x32\x63\x2a\x1a\x3a\x85\xd5\x3b\x36\x18\x6f\x65\xd8\x16\x11\xd4\xf3\x70\x5f\x10\x6f\x6e\x4e\x0e\xa0\x5a\x7a\x0d\xd7\x91\x48\xd6\xb0\xad\x67\xcc\x95\xa0\xd0\xb9\x96\x32\x7c\x78\x72\x3d\x34\xee\xde\xba\x4e\xcb\xce\x6b\xd3\x79\x77\x29\xe3\x33\x46\x41\x61\x6f\xec\x33\xcf\xa8\x4c\xa8\x6f\x25\xe1\xc1\x14\x90\x4c\xae\xe4\xa9\xa7\xb1\x6f\x75\xa3\x1d\x07\x8b\x43\xe5\x52\x3e\x95\x62\x2c\x77\x37\xcc\x41\xae\x01\x5e\x43\x50\xf2\xe9\x2b\xc1\xe8\x5a\x4e\xb7\x2a\xdd\x1e\xf3\x32\x37\xa3\x41\xbf\xa1\x76\x0e\x16\xaf\xe8\x79\x6a\x72\x4c\x78\xcf\xc2\x13\x19\x53\xb6\xb1\x37\x96\x35\x88\xc6\x40\xd5\xa0\x8d\xcb\x0e\x90\xe8\x7d\xb7\x4f\xe8\x9f\xc5\x53\x43\xb5\xed\x45\x05\x00\x36\xb9\x25\x24\x0d\x4a\xcc\xd3\x23\x98\x86\x07\x07\xca\x40\xea\x25\x64\x16\xfa\xac\xe1\xd7\xf2\x7f\xa6\x7f\x4e\x3e\x39\x42\x34\x3c\x60\x9a\xb0\xeb\xd4\x82\x51\x37\x1d\xea\xd4\xde\xaa\x69\xed\x25\x72\xf8\x3d\xe7\x92\xe5\x44\xe5\xc8\xe8\x69\x21\x96\x30\x52\xd3\x56\xf2\x6d\xc4\x95\x1d\x5a\x6e\x21\x4d\xc2\xbf\xc7\x93\x69\x21\x96\x6f\x27\x95\x0f\xbf\x1f\x98\x16\x7c\xf8\x1f\xe0\xc1\xeb\x34\x58\xa6\xde\x44\xec\x0d\x8a\x6f\xdb\xf5\xf1\x9a\xb5\x57\xe5\x03\x74\xc3\x10\x23\xc0\x15\xe3\x82\xb3\x6b\x73\x15\x48\xf2\x98\x2a\xae\x59\x2d\xd6\x0d\x1b\x13\x0f\x28\x00\x08\x49\x83\x51\x96\x09\x14\x8d\xd7\xf4\x83\x87\x59\x54\x3b\x02\x3f\xf5\x6e\xd2\x55\xfb\xe5\xbd\x93\x45\x79\x92\x39\x98\xca\x09\x71\x21\x3f\x8d\xb2\x4d\xb0\x2d\xd5\xad\xc6\xef\x6d\xd2\xaa\x9f\x6b\x0e\x94\x26\xfc\x87\xc1\xca\x08\xbc\xd1\x56\x77\xd2\x0e\xa6\x4b\x93\x89\xc5\x4c\xca\x5b\x32\xa7\xe8\x72\x13\xf4\x25\x89\x66\x25\x8d\xf3\xa8\x39\x11\x82\x46\x9a\x68\x85\x4e\x57\xaf\xf0\x72\xe3\xe7\xea\x3f\x57\xcf\x3d\x31\xae\x1d\x87\x06\xbf\x39\x9a\xf4
\x45\x6c\x99\x75\x48\x1f\xf1\xfe\x8c\x3c\xc1\x92\x17\x38\xa7\xa8\x7b\xcc\xc0\xc1\xa3\x10\xa2\x25\xcd\x05\x10\xd9\xd4\x29\x49\x36\xfa\x18\x49\x70\x35\x5f\x0f\xa6\xed\x1a\xda\x1e\x89\x11\x64\xde\x4a\x16\x29\x4a\x10\x5b\xd0\x51\xfc\xe6\x78\xf4\xe7\x21\x5d\x80\x1f\x95\x95\xa0\x68\x9c\xe8\x81\x9e\xab\xa2\x23\xfa\xaa\x69\x89\x7b\x57\x79\x48\x8e\x9f\x21\x24\x18\x1a\x13\xd0\x08\x93\x24\x38\x73\x80\x9b\x9b\x7b\x0b\x55\x89\x71\x14\x5d\xfd\xa7\x30\xb1\xd3\x6a\x75\xe5\xcc\x10\x78\x2d\x02\x19\xf3\x6a\x0b\xe4\x66\x69\x5e\x27\x5b\x8e\xf2\xc5\x8f\x4f\x05\x4e\x60\x88\x04\xc9\x13\xea\x8e\x5e\xbd\x06\x2f\xd5\xf2\xc1\x44\x20\x16\x1b\xdc\x77\x7d\x1b\x71\xa4\xc5\xe9\x1c\x70\x7d\x43\xea\xe7\x47\x26\xb8\xbe\xe0\x24\x5a\xf6\xfc\x32\x77\x41\x91\x86\xb7\xb1\x46\xb8\x50\xec\x6a\x37\x5b\xdb\x03\x48\xc2\x31\x4d\x9c\x7e\x0f\x94\x1d\x2d\x70\xdf\x33\x61\x01\xae\x06\xc9\x5b\x49\x9b\xbe\x21\x6b\xd1\xa2\x70\x7d\x04\xed\x82\xa8\xbd\x07\x64\x2d\x4d\x3a\xfe\xa0\xd9\x43\x6d\x2c\x08\xd1\x0b\x71\xd5\x41\x3e\x91\x96\x19\x6c\x7b\x6e\x6b\xb8\x0e\xcf\x77\xfd\xb8\xac\x40\x8e\x1a\x40\x71\x5b\xf5\xc3\xb6\xb5\x79\xce\x51\xa9\x80\x42\xd1\x2b\x94\x71\xb4\xf2\xbd\xdf\x29\x0c\xb5\xd9\xd5\x50\x06\xae\x1b\x54\x75\x6d\xde\x5a\xa1\x93\xa8\x9c\x83\xdc\x77\xb3\xf2\xa6\x97\x5b\x58\x99\x4f\x67\x51\x55\xbd\xa4\x1f\xe1\x62\x49\x4e\xc4\xa4\x91\x54\x9d\x9d\x4b\xa7\xc6\x77\x79\x1a\xc5\xfb\x0f\x74\x06\xd7\xc0\xc6\xd4\xe2\x14\x1d\xc6\xef\x56\xe7\x34\xc1\xbf\xab\x92\x95\x3d\x51\xc0\x5b\xfa\xcc\x2c\x9c\xac\x8e\xcc\x33\xea\x9e\x68\x5e\xb2\x08\x4b\xf6\xe3\x64\xa8\x91\xd1\x46\x27\xe4\xfd\x0a\x7c\x6d\x4a\x30\x49\x49\xd8\xbb\x79\x1e\x87\xe7\x2c\xe1\xc6\x1e\xee\x08\xa7\xdf\x08\x68\x36\xcd\x27\xb6\x28\x3c\x95\xe2\xf8\x29\x79\xa9\xbc\x10\xb6\x99\x1a\xe9\x38\x64\xda\x40\x33\x49\xdc\xd1\x02\x5d\x00\x10\xb2\x76\x80\xa1\xba\xd0\x74\xd9\x9b\x4c\x09\xe4\xc7\x1a\x39\xbf\xcb\x14\x1b\xa5\x33\x25\x76\x4b\x92\x41\x5c\x9d\x28\x34\x34\x46\xf3\x6d\xba\x25\xe6\x7b\xb6\x6d\xa9\x00\x0c\x8e\xe8\x57\x71\x4a\x60\xb3\x4d\xdd\x94\x08\xbd\xa9\xad\x16\xaf\xae\x06\x4d\xbd\x90\x77\x94\xdd\xb2\xd4\x11\xf0\xd6\x51\x53\x59\xc9\x19\xe6\x28\x1a\x04\xe3\x37\xcd\x32\xe3\x01\x18\xd3\x8a\x0c\x73\x20\x99\x42\x23\xd5\xed\x11\x39\x3c\x10\x7c\x09\xdc\x2b\xec\x82\xf5\xce\x80\x26\x5d\x69\x63\xbf\x81\x11\x9a\xe8\xea\x33\x90\x29\xf7\x6c\xa2\x67\xb5\xc3\x74\x09\x3b\x72\xf7\xb7\x6f\xf1\xa0\x06\xe4\x22\x1a\xe2\x2b\x42\x26\x98\xb7\x0e\x09\x32\xb0\xa9\xc5\xc3\xa5\x20\x13\x5c\xf1\x86\x1b\x1a\xdf\x7a\x0b\x7a\x26\x46\x26\xe0\xf8\xe9\x70\xa9\xdc\xc5\xf2\x34\x60\x80\x3c\x86\xb4\x19\x93\xad\x67\xc1\x62\x5f\x95\x2f\x01\x73\x52\xd7\xd5\x4e\xee\x68\xe9\xa9\xf7\x72\x20\x12\xfa\x30\x9b\x0f\x4a\xb6\xdc\x5c\x83\x3e\x7d\xf7\x1d\x8d\x17\x81\x43\x02\x9f\xc9\x16\xb0\xd7\x86\xa9\xbd\x46\x76\xe2\x08\xe2\x14\x0f\xa1\x49\xe8\x55\x25\x40\x7c\xe7\x77\xba\x90\x2a\x8f\xac\x73\xd6\xa1\x51\x53\xd3\xad\xee\x18\x2f\x05\x46\x11\xda\xa9\x48\xd5\x5e\xe5\x10\x5e\x60\xe5\x47\x4f\xbf\x20\x82\xb2\xda\x3d\xf0\xeb\x85\x6b\x97\xa5\xc9\xe1\xcc\x35\xca\x1e\x8f\x66\x17\x3a\x86\xe4\xcb\x8e\xe9\x04\xca\x16\xcf\x93\x3a\x92\x88\xc8\x47\x69\x67\xbc\x5c\xec\x72\x60\xde\x57\x11\x5a\xa0\x19\x12\x17\x73\xd9\x83\xfe\x92\xb0\x70\xe5\xae\x51\xbd\xa9\x11\xc7\xd9\xec\x86\xad\x7d\x62\x70\xec\x3f\x87\xa7\x78\x26\xe4\xac\xad\x67\xdf\xc6\xcb\x10\xcf\x3a\x08\xfc\x9e\x9a\xfc\x95\x64\x69\xb7\xfc\xc4\x5a\xbd\xf4\x51\x98\xee\xc2\x34\x1c\x76\x69\xa1\xa8\x13\x58\xe2\x39\xda\xfb\x05\x62\x02\x33\x81\x40\xed\x7d\x22\x52\xa3\xb9\xeb\xa4\xf6\x85\x3a\xfd\xff\x10\x15\xa1\xc3\xed\xcc\xf5\xd0\x8b\x5f\x3e\x66\xb3\x89\x90\x1e\xbb\xac\x55\x8b\xc4\x70\x28\x57\x2f\xbd\x69\x2b\x1e\x29\x07\x72\xb
e\xf8\x14\x1b\xc1\x78\x30\xd2\x88\xe5\x09\x1d\x28\xe8\x5d\xf0\xee\x8a\xb3\x6c\xea\xc2\x3b\xa9\x8a\x12\x5f\xac\xea\xac\x9a\x9d\xaa\x3a\x57\xb0\x81\x84\xc6\x6c\xc5\xcd\x5a\x04\xec\xe7\x50\x43\x72\x04\x50\xb8\x5c\x77\xa0\x71\xd3\x7f\x11\x8b\x9b\xa5\x94\xb7\x91\xe7\x71\x22\xa7\x74\x73\x05\x55\xfb\x66\x04\x02\x48\x5c\x0a\x4b\x73\xff\xdf\x26\x80\x95\x1c\x9e\x85\x1d\x09\xa6\x47\x96\x84\x8b\xa2\xf3\x6e\x8e\x07\x7d\x8a\x02\xb2\x28\xe7\x05\x82\xe1\xc7\x28\x5a\x92\x9e\x40\x1e\x2b\x30\x6f\xae\x60\x35\x27\x86\x13\x4b\x00\x0f\x28\x4e\xfd\x14\x13\x96\xe9\x05\x7d\x60\xd4\x55\xe5\x01\xfe\xf0\xe9\x20\x73\x03\x9b\x04\xeb\xc2\xcf\x23\xda\x38\xef\x9a\x4d\x07\x06\xa8\xcc\xb7\xf4\x70\x88\xcb\x6b\xbb\x4c\xb4\x26\x32\x7b\xf3\x4b\x36\x7f\x19\x77\x70\x86\x65\x56\x57\x96\x80\xf7\xc9\x3c\xa2\xcb\xd2\xb3\x04\xd5\xbb\xb6\x90\x1e\x0d\x1c\xce\x5f\x62\x40\x85\x26\xe4\x03\x40\x55\xd7\xd8\xa6\x36\xaf\xe9\xe6\x4f\x66\xcc\xaa\x55\x89\xb1\xdd\xb6\x98\x66\x98\x2d\xdd\xe5\xda\x17\x94\x3e\xa2\xdb\x92\xed\x54\xc1\x86\x9e\x66\x29\xd2\x25\x73\xcb\x8b\x3a\xd4\x1f\xa8\xb3\x37\xa3\x88\xb9\x98\xa0\x6b\xbe\x8b\xfe\x2c\x21\x6e\xb8\x7c\x26\x38\xe4\x8b\xec\xae\x5d\x1d\xe9\x22\x7a\xad\xe4\x6a\x77\xba\xd3\x4b\x75\x6d\xd5\xb0\x50\x59\x7f\x10\x1f\x47\xc7\x23\x9b\x2c\xbb\x0e\x92\x3f\x15\xa3\x09\x15\x97\x26\xbc\x74\xf2\x8d\x25\x3c\xd1\x15\x28\x42\x45\x5a\xe7\x22\x35\xd7\x4a\x9e\x4c\x89\x71\x68\xe4\x8c\x3b\x24\x01\x9c\xd7\x2a\x24\xbe\xc7\x61\x18\x8f\x0a\x09\x36\x3e\xf8\x91\xdb\x27\xee\x71\x78\x31\x0b\xe2\x45\xda\x7e\xd3\xab\xad\xaf\x68\xc9\x1e\x20\x54\xab\xc7\x0c\xa0\xa7\x43\x38\x23\xb8\xfd\x75\x95\x2c\xe1\x1a\x00\x18\x78\x1e\x7b\xbf\x90\x81\x97\x66\x27\x6c\xf5\x1c\x62\x80\xc6\x99\x9a\x83\x90\x66\x38\x3a\x8c\xd7\x67\x49\x82\x91\xaf\x88\x9c\xd9\x5e\x86\x47\x21\x6f\xab\x9d\xbe\x77\xec\x40\x77\xc6\x4c\xf6\x77\xbf\x97\x0a\x50\x52\xa3\x7d\x84\x81\x0d\xe7\x08\x32\xc2\xdb\xdd\x25\x2c\x81\xd6\xae\x92\x0c\x01\x82\x73\x80\x41\x6c\x29\xcd\x5d\x7c\x56\xde\x52\x4b\x1e\x65\xa8\xe7\xe9\x59\x03\x70\x1d\xfd\x50\x8b\x97\x97\x59\x47\xf2\xd5\xd7\x85\x2b\xcb\x12\x3e\xb7\x04\x34\x05\x88\xa2\x7e\x08\x1a\x2a\x65\xf3\xb7\x76\xea\xf2\x71\x9b\xe7\xd5\x23\xb7\xbf\x06\xd8\xe7\x42\x61\x0a\xb3\x76\xed\x4f\x73\x4d\x82\x9f\x96\x18\xea\xfb\xda\x2a\x3f\x64\x31\x21\x92\xf8\x6d\xbb\x4f\x60\xa6\x3a\x38\xbe\xeb\xc0\x69\x2a\x8c\x9f\xa6\xb6\xf5\x2e\x76\xc1\x45\x2a\xbe\x4a\xa6\x8b\x15\x7a\x3e\xc7\xe5\xc7\x04\x3f\x0d\x12\x22\x05\x3c\xbf\x0e\xa3\x0a\xd5\xdf\xd8\xc1\xf9\x08\x9c\xfc\x6d\x34\x9b\xbe\x1e\xea\xe2\x3b\x7b\x24\x6c\xa5\x83\xde\x33\x06\xc4\x16\x9c\x04\x43\x27\xdb\xa7\xb0\xa1\x0e\x32\x42\x74\xc3\x46\x47\xa2\x74\xb8\x7d\x48\x00\x44\xc9\xa2\x0c\x2b\x9f\xf4\x55\x2a\x84\x13\x2b\x03\x1e\x29\xec\x84\x31\x7f\x01\x45\x68\xaa\xdc\xb0\x3c\x16\x73\x87\x86\xb7\x06\x24\x0a\x69\xf4\xb4\x7d\x3e\x63\x0b\xa6\x8b\xb7\x41\xce\x9e\x10\x04\xfc\xb3\x4b\xb3\x46', 1)
| 13,463
| 13,463
| 0.749981
| 3,361
| 13,463
| 3.000595
| 0.077655
| 0.001785
| 0.001785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.320505
| 0.000223
| 13,463
| 1
| 13,463
| 13,463
| 0.428752
| 0
| 0
| 0
| 0
| 1
| 0.997029
| 0.997029
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6360b93958515b92a0980d33ca49d6f0e073346a
| 54
|
py
|
Python
|
addons14/project_task_default_stage/models/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/project_task_default_stage/models/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/project_task_default_stage/models/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import project
from . import project_task_type
| 18
| 31
| 0.814815
| 8
| 54
| 5.25
| 0.625
| 0.47619
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 2
| 32
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
636a6e179b723b89ea7374c291d8449202cdc334
| 131
|
py
|
Python
|
models/generators/__init__.py
|
Cuijie12358/Semantic-Segmentation-using-Adversarial-Networks
|
0dbded0a3c545d7aba45a34acdde1e5683af16da
|
[
"MIT"
] | 110
|
2017-04-21T13:02:56.000Z
|
2021-12-06T03:34:46.000Z
|
models/generators/__init__.py
|
Cuijie12358/Semantic-Segmentation-using-Adversarial-Networks
|
0dbded0a3c545d7aba45a34acdde1e5683af16da
|
[
"MIT"
] | 7
|
2017-07-25T10:39:04.000Z
|
2019-02-28T08:06:39.000Z
|
models/generators/__init__.py
|
Cuijie12358/Semantic-Segmentation-using-Adversarial-Networks
|
0dbded0a3c545d7aba45a34acdde1e5683af16da
|
[
"MIT"
] | 43
|
2017-04-19T12:36:41.000Z
|
2021-06-16T08:04:49.000Z
|
from models.generators.fcn32s import FCN32s
from models.generators.fcn16s import FCN16s
from models.generators.fcn8s import FCN8s
| 26.2
| 43
| 0.854962
| 18
| 131
| 6.222222
| 0.388889
| 0.267857
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 0.099237
| 131
| 4
| 44
| 32.75
| 0.864407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6374d82ce8cf901c3f698dac0a51ab64d5436716
| 3,052
|
py
|
Python
|
breed_routes.py
|
tiagofsv95/trabalho_final
|
202aa26adeae4ec0eb3777c07fadce9a61352490
|
[
"MIT"
] | null | null | null |
breed_routes.py
|
tiagofsv95/trabalho_final
|
202aa26adeae4ec0eb3777c07fadce9a61352490
|
[
"MIT"
] | null | null | null |
breed_routes.py
|
tiagofsv95/trabalho_final
|
202aa26adeae4ec0eb3777c07fadce9a61352490
|
[
"MIT"
] | null | null | null |
from flask import jsonify
from flask import make_response
import sqlite3
from sqlite3 import Error
import os

dirname = os.path.dirname(__file__)
database_dirname = dirname + '/database/adote_um_cao.db'

def get_all_breed():
    conn = None  # guard so the finally block cannot touch an unbound name
    try:
        conn = sqlite3.connect(database_dirname)
        sql = '''SELECT * FROM Raca'''
        cur = conn.cursor()
        cur.execute(sql)
        registros = cur.fetchall()
        if registros:
            names = [description[0] for description in cur.description]
            json_data = []
            for reg in registros:
                json_data.append(dict(zip(names, reg)))
            resp = make_response(jsonify(json_data), 200)
            return resp
        else:
            resp = make_response(jsonify({'mensagem': 'Registro não encontrado.'}), 204)
            return resp
    except Error as e:
        # str(e): sqlite3.Error instances are not JSON-serializable
        resp = make_response(jsonify({'mensagem': str(e)}), 500)
        return resp
    finally:
        if conn:
            conn.close()

# 1. Fetch a breed by its id
def get_breed_by_id(idbreed=None):
    if idbreed is None:
        resp = make_response(jsonify({'mensagem': 'Parametro id raça invalido.'}), 400)
        return resp
    else:
        conn = None
        try:
            conn = sqlite3.connect(database_dirname)
            # parameterized query; concatenating idbreed into the SQL string
            # was an SQL injection vector
            sql = '''SELECT * FROM Raca WHERE id = ?'''
            cur = conn.cursor()
            cur.execute(sql, (idbreed,))
            registro = cur.fetchone()
            if registro:
                names = [description[0] for description in cur.description]
                json_data = [dict(zip(names, registro))]
                resp = make_response(jsonify(json_data), 200)
                return resp
            else:
                resp = make_response(jsonify({'mensagem': 'Registro não encontrado.'}), 204)
                return resp
        except Error as e:
            resp = make_response(jsonify({'mensagem': str(e)}), 500)
            return resp
        finally:
            if conn:
                conn.close()

# 2. Fetch breeds by size (porteId)
def get_breed_by_size(sizeId=None):
    if sizeId is None:
        resp = make_response(jsonify({'mensagem': 'Parametro porteId da raça invalido.'}), 400)
        return resp
    else:
        conn = None
        try:
            conn = sqlite3.connect(database_dirname)
            sql = '''SELECT * FROM Raca WHERE porteId = ?'''
            cur = conn.cursor()
            cur.execute(sql, (sizeId,))
            registros = cur.fetchall()
            if registros:
                names = [description[0] for description in cur.description]
                json_data = []
                for reg in registros:
                    json_data.append(dict(zip(names, reg)))
                resp = make_response(jsonify(json_data), 200)
                return resp
            else:
                resp = make_response(jsonify({'mensagem': 'Registro não encontrado.'}), 204)
                return resp
        except Error as e:
            resp = make_response(jsonify({'mensagem': str(e)}), 500)
            return resp
        finally:
            if conn:
                conn.close()
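# Usage sketch (assumption: the route wiring is not part of this file; these
# handlers would be bound to URLs elsewhere, for example):
#   app.add_url_rule('/breeds', view_func=get_all_breed)
#   app.add_url_rule('/breeds/<idbreed>', view_func=get_breed_by_id)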
| 30.217822
| 95
| 0.549803
| 327
| 3,052
| 5.015291
| 0.235474
| 0.087805
| 0.107317
| 0.154268
| 0.818293
| 0.818293
| 0.802439
| 0.74878
| 0.74878
| 0.74878
| 0
| 0.021532
| 0.345675
| 3,052
| 101
| 96
| 30.217822
| 0.7997
| 0.01671
| 0
| 0.746835
| 0
| 0
| 0.103368
| 0.008336
| 0.025316
| 0
| 0
| 0
| 0
| 1
| 0.037975
| false
| 0
| 0.063291
| 0
| 0.240506
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9830f2fa46cfc09c9ff13fcbdcc9a9bcbc73bf46
| 116
|
py
|
Python
|
python/compose.py
|
tcooling/scripts
|
d342cb2537e1ccf158660188714a42f3cc6680a9
|
[
"MIT"
] | 1
|
2020-05-23T20:15:27.000Z
|
2020-05-23T20:15:27.000Z
|
python/compose.py
|
tcooling/scripts
|
d342cb2537e1ccf158660188714a42f3cc6680a9
|
[
"MIT"
] | null | null | null |
python/compose.py
|
tcooling/scripts
|
d342cb2537e1ccf158660188714a42f3cc6680a9
|
[
"MIT"
] | null | null | null |
from functools import reduce

def compose(*fns):
    # fold left over fns starting from the identity function;
    # the leftmost function is applied first
    return reduce(lambda g, f: lambda x: f(g(x)), fns, lambda x: x)
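# Illustrative usage (inc and dbl are hypothetical helpers, not part of the file):
#   inc = lambda n: n + 1
#   dbl = lambda n: n * 2
#   compose(inc, dbl)(3)  # dbl(inc(3)) == 8, leftmost function runs first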
| 29
| 67
| 0.681034
| 21
| 116
| 3.761905
| 0.571429
| 0.177215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181034
| 116
| 4
| 67
| 29
| 0.831579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
9863a115a8ad7718bbed0fe2f6e31a97ba53f4a4
| 25,996
|
py
|
Python
|
boa3_test/tests/compiler_tests/test_relational.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/tests/compiler_tests/test_relational.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/tests/compiler_tests/test_relational.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from boa3.boa3 import Boa3
from boa3.exception import CompilerError
from boa3.neo.vm.opcode.Opcode import Opcode
from boa3.neo.vm.type.Integer import Integer
from boa3_test.tests.boa_test import BoaTest
from boa3_test.tests.test_classes.TestExecutionException import TestExecutionException
from boa3_test.tests.test_classes.testengine import TestEngine
class TestRelational(BoaTest):
default_folder: str = 'test_sc/relational_test'
def test_number_equality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.NUMEQUAL
+ Opcode.RET
)
path = self.get_contract_path('NumEquality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 2)
self.assertEqual(True, result)
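# Note (Neo VM convention, stated as background rather than taken from this file):
# INITSLOT carries two operand bytes, the locals count then the arguments count,
# so the b'\x00' + b'\x02' pairs in these tests reserve 0 locals and 2 arguments.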
def test_number_inequality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.NUMNOTEQUAL
+ Opcode.RET
)
path = self.get_contract_path('NumInequality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 2)
self.assertEqual(False, result)
def test_number_inequality_operation_2(self):
path = self.get_contract_path('NumInequalityPython2.py')
with self.assertRaises(SyntaxError):
output = Boa3.compile(path)
def test_number_less_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.LT
+ Opcode.RET
)
path = self.get_contract_path('NumLessThan.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 2)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 1)
self.assertEqual(False, result)
def test_number_less_or_equal_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.LE
+ Opcode.RET
)
path = self.get_contract_path('NumLessOrEqual.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 2)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 1)
self.assertEqual(False, result)
def test_number_greater_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.GT
+ Opcode.RET
)
path = self.get_contract_path('NumGreaterThan.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 2)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 1)
self.assertEqual(True, result)
def test_number_greater_or_equal_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.GE
+ Opcode.RET
)
path = self.get_contract_path('NumGreaterOrEqual.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 2)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 1)
self.assertEqual(True, result)
def test_number_identity_operation(self):
path = self.get_contract_path('NumIdentity.py')
engine = TestEngine()
a = 1
b = 1
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution_true')
self.assertEqual(expected_result, result)
a = 1
b = 2
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution_false')
self.assertEqual(expected_result, result)
c = 1
d = c
expected_result = c is d
result = self.run_smart_contract(engine, path, 'with_attribution')
self.assertEqual(expected_result, result)
def test_number_not_identity_operation(self):
path = self.get_contract_path('NumNotIdentity.py')
engine = TestEngine()
a = 1
b = 2
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution_true')
self.assertEqual(expected_result, result)
a = 1
b = 1
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution_false')
self.assertEqual(expected_result, result)
c = 1
d = c
expected_result = c is not d
result = self.run_smart_contract(engine, path, 'with_attribution')
self.assertEqual(expected_result, result)
def test_boolean_equality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.NUMEQUAL
+ Opcode.RET
)
path = self.get_contract_path('BoolEquality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', True, False)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', True, True)
self.assertEqual(True, result)
def test_boolean_inequality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.NUMNOTEQUAL
+ Opcode.RET
)
path = self.get_contract_path('BoolInequality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', True, False)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', True, True)
self.assertEqual(False, result)
def test_boolean_identity_operation(self):
path = self.get_contract_path('BoolIdentity.py')
engine = TestEngine()
a = True
b = True
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution_true')
self.assertEqual(expected_result, result)
a = True
b = False
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution_false')
self.assertEqual(expected_result, result)
c = True
d = c
expected_result = c is d
result = self.run_smart_contract(engine, path, 'with_attribution')
self.assertEqual(expected_result, result)
def test_boolean_not_identity_operation(self):
path = self.get_contract_path('BoolNotIdentity.py')
engine = TestEngine()
a = True
b = False
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution_true')
self.assertEqual(expected_result, result)
a = True
b = True
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution_false')
self.assertEqual(expected_result, result)
c = True
d = c
expected_result = c is not d
result = self.run_smart_contract(engine, path, 'with_attribution')
self.assertEqual(expected_result, result)
def test_multiple_comparisons(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x03'
+ Opcode.LDARG1
+ Opcode.LDARG0
+ Opcode.LE
+ Opcode.LDARG0
+ Opcode.LDARG2
+ Opcode.LE
+ Opcode.BOOLAND
+ Opcode.RET
)
path = self.get_contract_path('NumRange.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 2, 5)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 1, 5)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 5, 1, 2)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 2, 5, 1)
self.assertEqual(False, result)
def test_string_equality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.EQUAL
+ Opcode.RET
)
path = self.get_contract_path('StrEquality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'test')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'unit')
self.assertEqual(True, result)
def test_string_inequality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.NOTEQUAL
+ Opcode.RET
)
path = self.get_contract_path('StrInequality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'test')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'unit')
self.assertEqual(False, result)
def test_string_less_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.LT
+ Opcode.RET
)
path = self.get_contract_path('StrLessThan.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 'test', 'unit')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'unit')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'test')
self.assertEqual(True, result)
def test_string_less_or_equal_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.LE
+ Opcode.RET
)
path = self.get_contract_path('StrLessOrEqual.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 'test', 'unit')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'unit')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'test')
self.assertEqual(True, result)
def test_string_greater_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.GT
+ Opcode.RET
)
path = self.get_contract_path('StrGreaterThan.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 'test', 'unit')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'unit')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'test')
self.assertEqual(False, result)
def test_string_greater_or_equal_than_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.GE
+ Opcode.RET
)
path = self.get_contract_path('StrGreaterOrEqual.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 'test', 'unit')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'unit')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 'unit', 'test')
self.assertEqual(False, result)
def test_string_identity_operation(self):
path = self.get_contract_path('StrIdentity.py')
engine = TestEngine()
a = 'unit'
b = 'unit'
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution_true')
self.assertEqual(expected_result, result)
a = 'unit'
b = 'test'
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution_false')
self.assertEqual(expected_result, result)
c = 'unit'
d = c
expected_result = c is d
result = self.run_smart_contract(engine, path, 'with_attribution')
self.assertEqual(expected_result, result)
def test_string_not_identity_operation(self):
path = self.get_contract_path('StrNotIdentity.py')
engine = TestEngine()
a = 'unit'
b = 'test'
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution_true')
self.assertEqual(expected_result, result)
a = 'unit'
b = 'unit'
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution_false')
self.assertEqual(expected_result, result)
c = 'unit'
d = c
expected_result = c is not d
result = self.run_smart_contract(engine, path, 'with_attribution')
self.assertEqual(expected_result, result)
def test_mixed_equality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.EQUAL
+ Opcode.RET
)
path = self.get_contract_path('MixedEquality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 'unit')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', 123, '123')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'Main', Integer.from_bytes(b'123'), '123')
self.assertEqual(False, result)
def test_mixed_inequality_operation(self):
expected_output = (
Opcode.INITSLOT
+ b'\x00'
+ b'\x02'
+ Opcode.LDARG0
+ Opcode.LDARG1
+ Opcode.NOTEQUAL
+ Opcode.RET
)
path = self.get_contract_path('MixedInequality.py')
output = Boa3.compile(path)
self.assertEqual(expected_output, output)
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'Main', 1, 'unit')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', 123, '123')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'Main', Integer.from_bytes(b'123'), '123')
self.assertEqual(True, result)
def test_mixed_less_than_operation(self):
path = self.get_contract_path('MixedLessThan.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
def test_mixed_less_or_equal_than_operation(self):
path = self.get_contract_path('MixedLessOrEqual.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
def test_mixed_greater_than_operation(self):
path = self.get_contract_path('MixedGreaterThan.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
def test_mixed_greater_or_equal_than_operation(self):
path = self.get_contract_path('MixedGreaterOrEqual.py')
self.assertCompilerLogs(CompilerError.MismatchedTypes, path)
def test_mixed_identity(self):
path = self.get_contract_path('MixedIdentity.py')
engine = TestEngine()
# a mixed-type identity comparison always results in False, but it does compile
result = self.run_smart_contract(engine, path, 'mixed', expected_result_type=bool)
self.assertEqual(False, result)
def test_list_equality_with_slice(self):
path = self.get_contract_path('ListEqualityWithSlice.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'main', ['unittest', '123'], 'unittest',
expected_result_type=bool)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', ['unittest', '123'], '123',
expected_result_type=bool)
self.assertEqual(False, result)
with self.assertRaises(TestExecutionException):
self.run_smart_contract(engine, path, 'main', [], '')
def test_list_identity(self):
path = self.get_contract_path('ListIdentity.py')
engine = TestEngine()
a = [1, 2, 3]
b = a
expected_result = a is b
result = self.run_smart_contract(engine, path, 'with_attribution', expected_result_type=bool)
self.assertEqual(expected_result, result)
a = [1, 2, 3]
b = [1, 2, 3]
expected_result = a is b
result = self.run_smart_contract(engine, path, 'without_attribution', expected_result_type=bool)
self.assertEqual(expected_result, result)
def test_list_not_identity(self):
path = self.get_contract_path('ListNotIdentity.py')
engine = TestEngine()
a = [1, 2, 3]
b = a
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'with_attribution', expected_result_type=bool)
self.assertEqual(expected_result, result)
a = [1, 2, 3]
b = [1, 2, 3]
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'without_attribution', expected_result_type=bool)
self.assertEqual(expected_result, result)
def test_compare_same_value_hard_coded(self):
path = self.get_contract_path('CompareSameValueHardCoded.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'testing_something',
expected_result_type=bool)
self.assertEqual(True, result)
def test_compare_same_value_argument(self):
path = self.get_contract_path('CompareSameValueArgument.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'testing_something', bytes(20),
expected_result_type=bool)
self.assertEqual(True, result)
def test_compare_string(self):
path = self.get_contract_path('CompareString.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'test1', '|',
expected_result_type=bool)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'test2', '|',
expected_result_type=bool)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'test3', '|',
expected_result_type=bool)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'test4', '|',
expected_result_type=bool)
self.assertEqual(True, result)
def test_boa2_equality_test2(self):
path = self.get_contract_path('Equality2Boa2Test.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'main', 1)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', 2)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', 3)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', 4)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', 5)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', 6)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', 7)
self.assertEqual(False, result)
def test_none_identity_operation(self):
path = self.get_contract_path('NoneIdentity.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'main', 1)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', True)
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', 'string')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', b'bytes')
self.assertEqual(False, result)
result = self.run_smart_contract(engine, path, 'main', None)
self.assertEqual(True, result)
def test_none_not_identity_operation(self):
path = self.get_contract_path('NoneNotIdentity.py')
engine = TestEngine()
result = self.run_smart_contract(engine, path, 'main', 1)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', True)
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', 'string')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', b'bytes')
self.assertEqual(True, result)
result = self.run_smart_contract(engine, path, 'main', None)
self.assertEqual(False, result)
def test_tuple_identity(self):
path = self.get_contract_path('TupleIdentity.py')
engine = TestEngine()
a = (1, 2, 3)
b = a
expected_result = a is b
result = self.run_smart_contract(engine, path, 'with_attribution', expected_result_type=bool)
self.assertEqual(expected_result, result)
# Python tries to conserve memory: since tuples are immutable, a and b may end
# up referencing the same object, which deviates from Neo's expected behaviour
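# For example (a CPython implementation detail, not a language guarantee):
#   a = (1, 2, 3); b = (1, 2, 3); a is b  # may be True, identical tuple
#   constants can be folded into one object, unlike on the Neo VM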
result = self.run_smart_contract(engine, path, 'without_attribution', expected_result_type=bool)
self.assertEqual(False, result)
def test_tuple_not_identity(self):
path = self.get_contract_path('TupleNotIdentity.py')
engine = TestEngine()
a = (1, 2, 3)
b = a
expected_result = a is not b
result = self.run_smart_contract(engine, path, 'with_attribution', expected_result_type=bool)
self.assertEqual(expected_result, result)
# Python tries to conserve memory: since tuples are immutable, a and b may end
# up referencing the same object, which deviates from Neo's expected behaviour
result = self.run_smart_contract(engine, path, 'without_attribution', expected_result_type=bool)
self.assertEqual(True, result)
| 35.465211
| 119
| 0.612402
| 2,900
| 25,996
| 5.285172
| 0.061034
| 0.112546
| 0.07751
| 0.129184
| 0.911463
| 0.911463
| 0.879233
| 0.857898
| 0.825602
| 0.804463
| 0
| 0.014269
| 0.285582
| 25,996
| 732
| 120
| 35.513661
| 0.811006
| 0.014618
| 0
| 0.781302
| 0
| 0
| 0.076689
| 0.01722
| 0
| 0
| 0
| 0
| 0.202003
| 1
| 0.066778
| false
| 0
| 0.011686
| 0
| 0.081803
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98ad9540341302a19bda916ae8289dabda669407
| 202,664
|
py
|
Python
|
pyloxi3/loxi/of15/common.py
|
floodlight/loxigen-artifacts
|
1822ec984cb6da342bbaa381677071cbbe53cee6
|
[
"Apache-2.0"
] | 1
|
2017-06-01T09:41:07.000Z
|
2017-06-01T09:41:07.000Z
|
pyloxi3/loxi/of15/common.py
|
floodlight/loxigen-artifacts
|
1822ec984cb6da342bbaa381677071cbbe53cee6
|
[
"Apache-2.0"
] | 2
|
2017-07-03T08:50:56.000Z
|
2018-03-12T16:16:19.000Z
|
pyloxi3/loxi/of15/common.py
|
floodlight/loxigen-artifacts
|
1822ec984cb6da342bbaa381677071cbbe53cee6
|
[
"Apache-2.0"
] | 20
|
2015-02-16T15:23:04.000Z
|
2022-03-15T20:06:10.000Z
|
# Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
from . import util
import functools
import loxi.generic_util
import sys
ofp = sys.modules['loxi.of15']
class bsn_controller_connection(loxi.OFObject):
def __init__(self, state=None, auxiliary_id=None, role=None, uri=None):
if state != None:
self.state = state
else:
self.state = 0
if auxiliary_id != None:
self.auxiliary_id = auxiliary_id
else:
self.auxiliary_id = 0
if role != None:
self.role = role
else:
self.role = 0
if uri != None:
self.uri = uri
else:
self.uri = ""
return
def pack(self):
packed = []
packed.append(struct.pack("!B", self.state))
packed.append(struct.pack("!B", self.auxiliary_id))
packed.append(b'\x00' * 2)
packed.append(struct.pack("!L", self.role))
packed.append(struct.pack("!256s", self.uri.encode()))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_controller_connection()
obj.state = reader.read("!B")[0]
obj.auxiliary_id = reader.read("!B")[0]
reader.skip(2)
obj.role = reader.read("!L")[0]
obj.uri = reader.read("!256s")[0].decode().rstrip("\x00")
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.state != other.state: return False
if self.auxiliary_id != other.auxiliary_id: return False
if self.role != other.role: return False
if self.uri != other.uri: return False
return True
def pretty_print(self, q):
q.text("bsn_controller_connection {")
with q.group():
with q.indent(2):
q.breakable()
q.text("state = ");
value_name_map = {0: 'OFP_BSN_CONTROLLER_CONNECTION_STATE_DISCONNECTED', 1: 'OFP_BSN_CONTROLLER_CONNECTION_STATE_CONNECTED'}
if self.state in value_name_map:
q.text("%s(%d)" % (value_name_map[self.state], self.state))
else:
q.text("%#x" % self.state)
q.text(","); q.breakable()
q.text("auxiliary_id = ");
q.text("%#x" % self.auxiliary_id)
q.text(","); q.breakable()
q.text("role = ");
value_name_map = {0: 'OFPCR_ROLE_NOCHANGE', 1: 'OFPCR_ROLE_EQUAL', 2: 'OFPCR_ROLE_MASTER', 3: 'OFPCR_ROLE_SLAVE'}
if self.role in value_name_map:
q.text("%s(%d)" % (value_name_map[self.role], self.role))
else:
q.text("%#x" % self.role)
q.text(","); q.breakable()
q.text("uri = ");
q.pp(self.uri)
q.breakable()
q.text('}')
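# (Throughout this module, functools.reduce(lambda x, y: x + y, packed) simply
# concatenates the serialized byte fragments; b''.join(packed) would be
# equivalent here.)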
class bsn_debug_counter_desc_stats_entry(loxi.OFObject):
def __init__(self, counter_id=None, name=None, description=None):
if counter_id != None:
self.counter_id = counter_id
else:
self.counter_id = 0
if name != None:
self.name = name
else:
self.name = ""
if description != None:
self.description = description
else:
self.description = ""
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.counter_id))
packed.append(struct.pack("!64s", self.name.encode()))
packed.append(struct.pack("!256s", self.description.encode()))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_debug_counter_desc_stats_entry()
obj.counter_id = reader.read("!Q")[0]
obj.name = reader.read("!64s")[0].decode().rstrip("\x00")
obj.description = reader.read("!256s")[0].decode().rstrip("\x00")
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.counter_id != other.counter_id: return False
if self.name != other.name: return False
if self.description != other.description: return False
return True
def pretty_print(self, q):
q.text("bsn_debug_counter_desc_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("counter_id = ");
q.text("%#x" % self.counter_id)
q.text(","); q.breakable()
q.text("name = ");
q.pp(self.name)
q.text(","); q.breakable()
q.text("description = ");
q.pp(self.description)
q.breakable()
q.text('}')
class bsn_debug_counter_stats_entry(loxi.OFObject):
def __init__(self, counter_id=None, value=None):
if counter_id != None:
self.counter_id = counter_id
else:
self.counter_id = 0
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.counter_id))
packed.append(struct.pack("!Q", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_debug_counter_stats_entry()
obj.counter_id = reader.read("!Q")[0]
obj.value = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.counter_id != other.counter_id: return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("bsn_debug_counter_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("counter_id = ");
q.text("%#x" % self.counter_id)
q.text(","); q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
class bsn_flow_checksum_bucket_stats_entry(loxi.OFObject):
def __init__(self, checksum=None):
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.checksum))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_flow_checksum_bucket_stats_entry()
obj.checksum = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.checksum != other.checksum: return False
return True
def pretty_print(self, q):
q.text("bsn_flow_checksum_bucket_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("checksum = ");
q.text("%#x" % self.checksum)
q.breakable()
q.text('}')
class bsn_generic_stats_entry(loxi.OFObject):
def __init__(self, tlvs=None):
if tlvs != None:
self.tlvs = tlvs
else:
self.tlvs = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(loxi.generic_util.pack_list(self.tlvs))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_generic_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.tlvs = loxi.generic_util.unpack_list(reader, ofp.bsn_tlv.bsn_tlv.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.tlvs != other.tlvs: return False
return True
def pretty_print(self, q):
q.text("bsn_generic_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("tlvs = ");
q.pp(self.tlvs)
q.breakable()
q.text('}')
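# Pattern note: pack() first appends struct.pack("!H", 0) as a length
# placeholder, then back-patches packed[0] once every field is serialized;
# unpack() mirrors this by slicing the reader to the decoded length (the
# second argument appears to account for the length bytes already consumed).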
class bsn_gentable_bucket_stats_entry(loxi.OFObject):
def __init__(self, checksum=None):
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
return
def pack(self):
packed = []
packed.append(util.pack_checksum_128(self.checksum))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_gentable_bucket_stats_entry()
obj.checksum = util.unpack_checksum_128(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.checksum != other.checksum: return False
return True
def pretty_print(self, q):
q.text("bsn_gentable_bucket_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("checksum = ");
q.pp(self.checksum)
q.breakable()
q.text('}')
class bsn_gentable_desc_stats_entry(loxi.OFObject):
def __init__(self, table_id=None, name=None, buckets_size=None, max_entries=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if name != None:
self.name = name
else:
self.name = ""
if buckets_size != None:
self.buckets_size = buckets_size
else:
self.buckets_size = 0
if max_entries != None:
self.max_entries = max_entries
else:
self.max_entries = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", self.table_id))
packed.append(struct.pack("!32s", self.name.encode()))
packed.append(struct.pack("!L", self.buckets_size))
packed.append(struct.pack("!L", self.max_entries))
packed.append(b'\x00' * 4)
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_gentable_desc_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.table_id = reader.read("!H")[0]
obj.name = reader.read("!32s")[0].decode().rstrip("\x00")
obj.buckets_size = reader.read("!L")[0]
obj.max_entries = reader.read("!L")[0]
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.name != other.name: return False
if self.buckets_size != other.buckets_size: return False
if self.max_entries != other.max_entries: return False
return True
def pretty_print(self, q):
q.text("bsn_gentable_desc_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("name = ");
q.pp(self.name)
q.text(","); q.breakable()
q.text("buckets_size = ");
q.text("%#x" % self.buckets_size)
q.text(","); q.breakable()
q.text("max_entries = ");
q.text("%#x" % self.max_entries)
q.breakable()
q.text('}')
class bsn_gentable_entry_desc_stats_entry(loxi.OFObject):
def __init__(self, checksum=None, key=None, value=None):
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
if key != None:
self.key = key
else:
self.key = []
if value != None:
self.value = value
else:
self.value = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", 0)) # placeholder for key_length at index 1
packed.append(util.pack_checksum_128(self.checksum))
packed.append(loxi.generic_util.pack_list(self.key))
packed[1] = struct.pack("!H", len(packed[-1]))
packed.append(loxi.generic_util.pack_list(self.value))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_gentable_entry_desc_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
_key_length = reader.read("!H")[0]
obj.checksum = util.unpack_checksum_128(reader)
obj.key = loxi.generic_util.unpack_list(reader.slice(_key_length), ofp.bsn_tlv.bsn_tlv.unpack)
obj.value = loxi.generic_util.unpack_list(reader, ofp.bsn_tlv.bsn_tlv.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.checksum != other.checksum: return False
if self.key != other.key: return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("bsn_gentable_entry_desc_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("checksum = ");
q.pp(self.checksum)
q.text(","); q.breakable()
q.text("key = ");
q.pp(self.key)
q.text(","); q.breakable()
q.text("value = ");
q.pp(self.value)
q.breakable()
q.text('}')
class bsn_gentable_entry_stats_entry(loxi.OFObject):
def __init__(self, key=None, stats=None):
if key != None:
self.key = key
else:
self.key = []
if stats != None:
self.stats = stats
else:
self.stats = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", 0)) # placeholder for key_length at index 1
packed.append(loxi.generic_util.pack_list(self.key))
packed[1] = struct.pack("!H", len(packed[-1]))
packed.append(loxi.generic_util.pack_list(self.stats))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_gentable_entry_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
_key_length = reader.read("!H")[0]
obj.key = loxi.generic_util.unpack_list(reader.slice(_key_length), ofp.bsn_tlv.bsn_tlv.unpack)
obj.stats = loxi.generic_util.unpack_list(reader, ofp.bsn_tlv.bsn_tlv.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.key != other.key: return False
if self.stats != other.stats: return False
return True
def pretty_print(self, q):
q.text("bsn_gentable_entry_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("key = ");
q.pp(self.key)
q.text(","); q.breakable()
q.text("stats = ");
q.pp(self.stats)
q.breakable()
q.text('}')
class bsn_gentable_stats_entry(loxi.OFObject):
def __init__(self, table_id=None, entry_count=None, checksum=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if entry_count != None:
self.entry_count = entry_count
else:
self.entry_count = 0
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.table_id))
packed.append(b'\x00' * 2)
packed.append(struct.pack("!L", self.entry_count))
packed.append(util.pack_checksum_128(self.checksum))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_gentable_stats_entry()
obj.table_id = reader.read("!H")[0]
reader.skip(2)
obj.entry_count = reader.read("!L")[0]
obj.checksum = util.unpack_checksum_128(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.entry_count != other.entry_count: return False
if self.checksum != other.checksum: return False
return True
def pretty_print(self, q):
q.text("bsn_gentable_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("entry_count = ");
q.text("%#x" % self.entry_count)
q.text(","); q.breakable()
q.text("checksum = ");
q.pp(self.checksum)
q.breakable()
q.text('}')
class bsn_interface(loxi.OFObject):
def __init__(self, hw_addr=None, name=None, ipv4_addr=None, ipv4_netmask=None):
if hw_addr != None:
self.hw_addr = hw_addr
else:
self.hw_addr = [0,0,0,0,0,0]
if name != None:
self.name = name
else:
self.name = ""
if ipv4_addr != None:
self.ipv4_addr = ipv4_addr
else:
self.ipv4_addr = 0
if ipv4_netmask != None:
self.ipv4_netmask = ipv4_netmask
else:
self.ipv4_netmask = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!6B", *self.hw_addr))
packed.append(b'\x00' * 2)
packed.append(struct.pack("!16s", self.name.encode()))
packed.append(struct.pack("!L", self.ipv4_addr))
packed.append(struct.pack("!L", self.ipv4_netmask))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_interface()
obj.hw_addr = list(reader.read('!6B'))
reader.skip(2)
obj.name = reader.read("!16s")[0].decode().rstrip("\x00")
obj.ipv4_addr = reader.read("!L")[0]
obj.ipv4_netmask = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.hw_addr != other.hw_addr: return False
if self.name != other.name: return False
if self.ipv4_addr != other.ipv4_addr: return False
if self.ipv4_netmask != other.ipv4_netmask: return False
return True
def pretty_print(self, q):
q.text("bsn_interface {")
with q.group():
with q.indent(2):
q.breakable()
q.text("hw_addr = ");
q.text(util.pretty_mac(self.hw_addr))
q.text(","); q.breakable()
q.text("name = ");
q.pp(self.name)
q.text(","); q.breakable()
q.text("ipv4_addr = ");
q.text(util.pretty_ipv4(self.ipv4_addr))
q.text(","); q.breakable()
q.text("ipv4_netmask = ");
q.text(util.pretty_ipv4(self.ipv4_netmask))
q.breakable()
q.text('}')
class bsn_lacp_stats_entry(loxi.OFObject):
def __init__(self, port_no=None, actor_sys_priority=None, actor_sys_mac=None, actor_port_priority=None, actor_port_num=None, actor_key=None, convergence_status=None, partner_sys_priority=None, partner_sys_mac=None, partner_port_priority=None, partner_port_num=None, partner_key=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if actor_sys_priority != None:
self.actor_sys_priority = actor_sys_priority
else:
self.actor_sys_priority = 0
if actor_sys_mac != None:
self.actor_sys_mac = actor_sys_mac
else:
self.actor_sys_mac = [0,0,0,0,0,0]
if actor_port_priority != None:
self.actor_port_priority = actor_port_priority
else:
self.actor_port_priority = 0
if actor_port_num != None:
self.actor_port_num = actor_port_num
else:
self.actor_port_num = 0
if actor_key != None:
self.actor_key = actor_key
else:
self.actor_key = 0
if convergence_status != None:
self.convergence_status = convergence_status
else:
self.convergence_status = 0
if partner_sys_priority != None:
self.partner_sys_priority = partner_sys_priority
else:
self.partner_sys_priority = 0
if partner_sys_mac != None:
self.partner_sys_mac = partner_sys_mac
else:
self.partner_sys_mac = [0,0,0,0,0,0]
if partner_port_priority != None:
self.partner_port_priority = partner_port_priority
else:
self.partner_port_priority = 0
if partner_port_num != None:
self.partner_port_num = partner_port_num
else:
self.partner_port_num = 0
if partner_key != None:
self.partner_key = partner_key
else:
self.partner_key = 0
return
def pack(self):
packed = []
packed.append(util.pack_port_no(self.port_no))
packed.append(struct.pack("!H", self.actor_sys_priority))
packed.append(struct.pack("!6B", *self.actor_sys_mac))
packed.append(struct.pack("!H", self.actor_port_priority))
packed.append(struct.pack("!H", self.actor_port_num))
packed.append(struct.pack("!H", self.actor_key))
packed.append(struct.pack("!B", self.convergence_status))
packed.append(b'\x00' * 1)
packed.append(struct.pack("!H", self.partner_sys_priority))
packed.append(struct.pack("!6B", *self.partner_sys_mac))
packed.append(struct.pack("!H", self.partner_port_priority))
packed.append(struct.pack("!H", self.partner_port_num))
packed.append(struct.pack("!H", self.partner_key))
packed.append(b'\x00' * 2)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_lacp_stats_entry()
obj.port_no = util.unpack_port_no(reader)
obj.actor_sys_priority = reader.read("!H")[0]
obj.actor_sys_mac = list(reader.read('!6B'))
obj.actor_port_priority = reader.read("!H")[0]
obj.actor_port_num = reader.read("!H")[0]
obj.actor_key = reader.read("!H")[0]
obj.convergence_status = reader.read("!B")[0]
reader.skip(1)
obj.partner_sys_priority = reader.read("!H")[0]
obj.partner_sys_mac = list(reader.read('!6B'))
obj.partner_port_priority = reader.read("!H")[0]
obj.partner_port_num = reader.read("!H")[0]
obj.partner_key = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.actor_sys_priority != other.actor_sys_priority: return False
if self.actor_sys_mac != other.actor_sys_mac: return False
if self.actor_port_priority != other.actor_port_priority: return False
if self.actor_port_num != other.actor_port_num: return False
if self.actor_key != other.actor_key: return False
if self.convergence_status != other.convergence_status: return False
if self.partner_sys_priority != other.partner_sys_priority: return False
if self.partner_sys_mac != other.partner_sys_mac: return False
if self.partner_port_priority != other.partner_port_priority: return False
if self.partner_port_num != other.partner_port_num: return False
if self.partner_key != other.partner_key: return False
return True
def pretty_print(self, q):
q.text("bsn_lacp_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text(util.pretty_port(self.port_no))
q.text(","); q.breakable()
q.text("actor_sys_priority = ");
q.text("%#x" % self.actor_sys_priority)
q.text(","); q.breakable()
q.text("actor_sys_mac = ");
q.text(util.pretty_mac(self.actor_sys_mac))
q.text(","); q.breakable()
q.text("actor_port_priority = ");
q.text("%#x" % self.actor_port_priority)
q.text(","); q.breakable()
q.text("actor_port_num = ");
q.text("%#x" % self.actor_port_num)
q.text(","); q.breakable()
q.text("actor_key = ");
q.text("%#x" % self.actor_key)
q.text(","); q.breakable()
q.text("convergence_status = ");
q.text("%#x" % self.convergence_status)
q.text(","); q.breakable()
q.text("partner_sys_priority = ");
q.text("%#x" % self.partner_sys_priority)
q.text(","); q.breakable()
q.text("partner_sys_mac = ");
q.text(util.pretty_mac(self.partner_sys_mac))
q.text(","); q.breakable()
q.text("partner_port_priority = ");
q.text("%#x" % self.partner_port_priority)
q.text(","); q.breakable()
q.text("partner_port_num = ");
q.text("%#x" % self.partner_port_num)
q.text(","); q.breakable()
q.text("partner_key = ");
q.text("%#x" % self.partner_key)
q.breakable()
q.text('}')
class bsn_port_counter_stats_entry(loxi.OFObject):
def __init__(self, port_no=None, values=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if values != None:
self.values = values
else:
self.values = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 2)
packed.append(util.pack_port_no(self.port_no))
packed.append(loxi.generic_util.pack_list(self.values))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_port_counter_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(2)
obj.port_no = util.unpack_port_no(reader)
obj.values = loxi.generic_util.unpack_list(reader, ofp.common.uint64.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.values != other.values: return False
return True
def pretty_print(self, q):
q.text("bsn_port_counter_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text(util.pretty_port(self.port_no))
q.text(","); q.breakable()
q.text("values = ");
q.pp(self.values)
q.breakable()
q.text('}')
class bsn_switch_pipeline_stats_entry(loxi.OFObject):
def __init__(self, pipeline=None):
if pipeline != None:
self.pipeline = pipeline
else:
self.pipeline = ""
return
def pack(self):
packed = []
packed.append(struct.pack("!256s", self.pipeline.encode()))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_switch_pipeline_stats_entry()
obj.pipeline = reader.read("!256s")[0].decode().rstrip("\x00")
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.pipeline != other.pipeline: return False
return True
def pretty_print(self, q):
q.text("bsn_switch_pipeline_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("pipeline = ");
q.pp(self.pipeline)
q.breakable()
q.text('}')
class bsn_table_checksum_stats_entry(loxi.OFObject):
def __init__(self, table_id=None, checksum=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!B", self.table_id))
packed.append(struct.pack("!Q", self.checksum))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_table_checksum_stats_entry()
obj.table_id = reader.read("!B")[0]
obj.checksum = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.checksum != other.checksum: return False
return True
def pretty_print(self, q):
q.text("bsn_table_checksum_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("checksum = ");
q.text("%#x" % self.checksum)
q.breakable()
q.text('}')
class bsn_vport(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = bsn_vport.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = bsn_vport()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("bsn_vport {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class bsn_vlan_counter_stats_entry(loxi.OFObject):
def __init__(self, vlan_vid=None, values=None):
if vlan_vid != None:
self.vlan_vid = vlan_vid
else:
self.vlan_vid = 0
if values != None:
self.values = values
else:
self.values = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", self.vlan_vid))
packed.append(b'\x00' * 4)
packed.append(loxi.generic_util.pack_list(self.values))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_vlan_counter_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.vlan_vid = reader.read("!H")[0]
reader.skip(4)
obj.values = loxi.generic_util.unpack_list(reader, ofp.common.uint64.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.vlan_vid != other.vlan_vid: return False
if self.values != other.values: return False
return True
def pretty_print(self, q):
q.text("bsn_vlan_counter_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("vlan_vid = ");
q.text("%#x" % self.vlan_vid)
q.text(","); q.breakable()
q.text("values = ");
q.pp(self.values)
q.breakable()
q.text('}')
class bsn_vlan_mac(loxi.OFObject):
def __init__(self, vlan_vid=None, mac=None):
if vlan_vid != None:
self.vlan_vid = vlan_vid
else:
self.vlan_vid = 0
if mac != None:
self.mac = mac
else:
self.mac = [0,0,0,0,0,0]
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.vlan_vid))
packed.append(struct.pack("!6B", *self.mac))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_vlan_mac()
obj.vlan_vid = reader.read("!H")[0]
obj.mac = list(reader.read('!6B'))
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.vlan_vid != other.vlan_vid: return False
if self.mac != other.mac: return False
return True
def pretty_print(self, q):
q.text("bsn_vlan_mac {")
with q.group():
with q.indent(2):
q.breakable()
q.text("vlan_vid = ");
q.text("%#x" % self.vlan_vid)
q.text(","); q.breakable()
q.text("mac = ");
q.text(util.pretty_mac(self.mac))
q.breakable()
q.text('}')
class bsn_vport_l2gre(bsn_vport):
type = 1
def __init__(self, flags=None, port_no=None, loopback_port_no=None, local_mac=None, nh_mac=None, src_ip=None, dst_ip=None, dscp=None, ttl=None, vpn=None, rate_limit=None, if_name=None):
if flags != None:
self.flags = flags
else:
self.flags = 0
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if loopback_port_no != None:
self.loopback_port_no = loopback_port_no
else:
self.loopback_port_no = 0
if local_mac != None:
self.local_mac = local_mac
else:
self.local_mac = [0,0,0,0,0,0]
if nh_mac != None:
self.nh_mac = nh_mac
else:
self.nh_mac = [0,0,0,0,0,0]
if src_ip != None:
self.src_ip = src_ip
else:
self.src_ip = 0
if dst_ip != None:
self.dst_ip = dst_ip
else:
self.dst_ip = 0
if dscp != None:
self.dscp = dscp
else:
self.dscp = 0
if ttl != None:
self.ttl = ttl
else:
self.ttl = 0
if vpn != None:
self.vpn = vpn
else:
self.vpn = 0
if rate_limit != None:
self.rate_limit = rate_limit
else:
self.rate_limit = 0
if if_name != None:
self.if_name = if_name
else:
self.if_name = ""
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.flags))
packed.append(util.pack_port_no(self.port_no))
packed.append(util.pack_port_no(self.loopback_port_no))
packed.append(struct.pack("!6B", *self.local_mac))
packed.append(struct.pack("!6B", *self.nh_mac))
packed.append(struct.pack("!L", self.src_ip))
packed.append(struct.pack("!L", self.dst_ip))
packed.append(struct.pack("!B", self.dscp))
packed.append(struct.pack("!B", self.ttl))
packed.append(b'\x00' * 2)
packed.append(struct.pack("!L", self.vpn))
packed.append(struct.pack("!L", self.rate_limit))
packed.append(struct.pack("!16s", self.if_name.encode()))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_vport_l2gre()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.flags = reader.read("!L")[0]
obj.port_no = util.unpack_port_no(reader)
obj.loopback_port_no = util.unpack_port_no(reader)
obj.local_mac = list(reader.read('!6B'))
obj.nh_mac = list(reader.read('!6B'))
obj.src_ip = reader.read("!L")[0]
obj.dst_ip = reader.read("!L")[0]
obj.dscp = reader.read("!B")[0]
obj.ttl = reader.read("!B")[0]
reader.skip(2)
obj.vpn = reader.read("!L")[0]
obj.rate_limit = reader.read("!L")[0]
obj.if_name = reader.read("!16s")[0].decode().rstrip("\x00")
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.flags != other.flags: return False
if self.port_no != other.port_no: return False
if self.loopback_port_no != other.loopback_port_no: return False
if self.local_mac != other.local_mac: return False
if self.nh_mac != other.nh_mac: return False
if self.src_ip != other.src_ip: return False
if self.dst_ip != other.dst_ip: return False
if self.dscp != other.dscp: return False
if self.ttl != other.ttl: return False
if self.vpn != other.vpn: return False
if self.rate_limit != other.rate_limit: return False
if self.if_name != other.if_name: return False
return True
def pretty_print(self, q):
q.text("bsn_vport_l2gre {")
with q.group():
with q.indent(2):
q.breakable()
q.text("flags = ");
value_name_map = {1: 'OF_BSN_VPORT_L2GRE_LOCAL_MAC_IS_VALID', 2: 'OF_BSN_VPORT_L2GRE_DSCP_ASSIGN', 4: 'OF_BSN_VPORT_L2GRE_DSCP_COPY', 8: 'OF_BSN_VPORT_L2GRE_LOOPBACK_IS_VALID', 16: 'OF_BSN_VPORT_L2GRE_RATE_LIMIT_IS_VALID'}
q.text(util.pretty_flags(self.flags, value_name_map.values()))
q.text(","); q.breakable()
q.text("port_no = ");
q.text(util.pretty_port(self.port_no))
q.text(","); q.breakable()
q.text("loopback_port_no = ");
q.text(util.pretty_port(self.loopback_port_no))
q.text(","); q.breakable()
q.text("local_mac = ");
q.text(util.pretty_mac(self.local_mac))
q.text(","); q.breakable()
q.text("nh_mac = ");
q.text(util.pretty_mac(self.nh_mac))
q.text(","); q.breakable()
q.text("src_ip = ");
q.text(util.pretty_ipv4(self.src_ip))
q.text(","); q.breakable()
q.text("dst_ip = ");
q.text(util.pretty_ipv4(self.dst_ip))
q.text(","); q.breakable()
q.text("dscp = ");
q.text("%#x" % self.dscp)
q.text(","); q.breakable()
q.text("ttl = ");
q.text("%#x" % self.ttl)
q.text(","); q.breakable()
q.text("vpn = ");
q.text("%#x" % self.vpn)
q.text(","); q.breakable()
q.text("rate_limit = ");
q.text("%#x" % self.rate_limit)
q.text(","); q.breakable()
q.text("if_name = ");
q.pp(self.if_name)
q.breakable()
q.text('}')
bsn_vport.subtypes[1] = bsn_vport_l2gre
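# Subtype dispatch: bsn_vport.unpack() peeks at the leading type field and
# delegates to the class registered in bsn_vport.subtypes, so assignments
# like the one above wire each concrete subclass into the generic parser.
# The second argument to reader.slice(_length, n) appears to rewind the n
# header bytes already consumed, so that _length (which counts the whole
# object, header included) spans exactly one TLV.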
class bsn_vport_q_in_q(bsn_vport):
type = 0
def __init__(self, port_no=None, ingress_tpid=None, ingress_vlan_id=None, egress_tpid=None, egress_vlan_id=None, if_name=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if ingress_tpid != None:
self.ingress_tpid = ingress_tpid
else:
self.ingress_tpid = 0
if ingress_vlan_id != None:
self.ingress_vlan_id = ingress_vlan_id
else:
self.ingress_vlan_id = 0
if egress_tpid != None:
self.egress_tpid = egress_tpid
else:
self.egress_tpid = 0
if egress_vlan_id != None:
self.egress_vlan_id = egress_vlan_id
else:
self.egress_vlan_id = 0
if if_name != None:
self.if_name = if_name
else:
self.if_name = ""
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.port_no))
packed.append(struct.pack("!H", self.ingress_tpid))
packed.append(struct.pack("!H", self.ingress_vlan_id))
packed.append(struct.pack("!H", self.egress_tpid))
packed.append(struct.pack("!H", self.egress_vlan_id))
packed.append(struct.pack("!16s", self.if_name.encode()))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_vport_q_in_q()
_type = reader.read("!H")[0]
assert(_type == 0)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.port_no = reader.read("!L")[0]
obj.ingress_tpid = reader.read("!H")[0]
obj.ingress_vlan_id = reader.read("!H")[0]
obj.egress_tpid = reader.read("!H")[0]
obj.egress_vlan_id = reader.read("!H")[0]
obj.if_name = reader.read("!16s")[0].decode().rstrip("\x00")
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.ingress_tpid != other.ingress_tpid: return False
if self.ingress_vlan_id != other.ingress_vlan_id: return False
if self.egress_tpid != other.egress_tpid: return False
if self.egress_vlan_id != other.egress_vlan_id: return False
if self.if_name != other.if_name: return False
return True
def pretty_print(self, q):
q.text("bsn_vport_q_in_q {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text("%#x" % self.port_no)
q.text(","); q.breakable()
q.text("ingress_tpid = ");
q.text("%#x" % self.ingress_tpid)
q.text(","); q.breakable()
q.text("ingress_vlan_id = ");
q.text("%#x" % self.ingress_vlan_id)
q.text(","); q.breakable()
q.text("egress_tpid = ");
q.text("%#x" % self.egress_tpid)
q.text(","); q.breakable()
q.text("egress_vlan_id = ");
q.text("%#x" % self.egress_vlan_id)
q.text(","); q.breakable()
q.text("if_name = ");
q.pp(self.if_name)
q.breakable()
q.text('}')
bsn_vport.subtypes[0] = bsn_vport_q_in_q
class bsn_vrf_counter_stats_entry(loxi.OFObject):
def __init__(self, vrf=None, values=None):
if vrf != None:
self.vrf = vrf
else:
self.vrf = 0
if values != None:
self.values = values
else:
self.values = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 2)
packed.append(struct.pack("!L", self.vrf))
packed.append(loxi.generic_util.pack_list(self.values))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bsn_vrf_counter_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(2)
obj.vrf = reader.read("!L")[0]
obj.values = loxi.generic_util.unpack_list(reader, ofp.common.uint64.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.vrf != other.vrf: return False
if self.values != other.values: return False
return True
def pretty_print(self, q):
q.text("bsn_vrf_counter_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("vrf = ");
q.text("%#x" % self.vrf)
q.text(","); q.breakable()
q.text("values = ");
q.pp(self.values)
q.breakable()
q.text('}')
class bucket(loxi.OFObject):
def __init__(self, bucket_id=None, actions=None, properties=None):
if bucket_id != None:
self.bucket_id = bucket_id
else:
self.bucket_id = 0
if actions != None:
self.actions = actions
else:
self.actions = []
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for len at index 0
packed.append(struct.pack("!H", 0)) # placeholder for action_array_len at index 1
packed.append(struct.pack("!L", self.bucket_id))
packed.append(loxi.generic_util.pack_list(self.actions))
packed[1] = struct.pack("!H", len(packed[-1]))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bucket()
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 2)
_action_array_len = reader.read("!H")[0]
obj.bucket_id = reader.read("!L")[0]
obj.actions = loxi.generic_util.unpack_list(reader.slice(_action_array_len), ofp.action.action.unpack)
obj.properties = loxi.generic_util.unpack_list(reader, ofp.common.group_bucket_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.bucket_id != other.bucket_id: return False
if self.actions != other.actions: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("bucket {")
with q.group():
with q.indent(2):
q.breakable()
q.text("bucket_id = ");
value_name_map = {4294967040: 'OFPG_BUCKET_MAX', 4294967293: 'OFPG_BUCKET_FIRST', 4294967294: 'OFPG_BUCKET_LAST', 4294967295: 'OFPG_BUCKET_ALL'}
if self.bucket_id in value_name_map:
q.text("%s(%d)" % (value_name_map[self.bucket_id], self.bucket_id))
else:
q.text("%#x" % self.bucket_id)
q.text(","); q.breakable()
q.text("actions = ");
q.pp(self.actions)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
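# bucket carries two back-patched lengths: packed[0] is the total size of
# the bucket, while packed[1] (action_array_len) is the byte length of just
# the packed action list. unpack() mirrors this, slicing _action_array_len
# bytes for the actions and handing the remainder to the property parser.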
class bucket_counter(loxi.OFObject):
def __init__(self, packet_count=None, byte_count=None):
if packet_count != None:
self.packet_count = packet_count
else:
self.packet_count = 0
if byte_count != None:
self.byte_count = byte_count
else:
self.byte_count = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.packet_count))
packed.append(struct.pack("!Q", self.byte_count))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bucket_counter()
obj.packet_count = reader.read("!Q")[0]
obj.byte_count = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.packet_count != other.packet_count: return False
if self.byte_count != other.byte_count: return False
return True
def pretty_print(self, q):
q.text("bucket_counter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("packet_count = ");
q.text("%#x" % self.packet_count)
q.text(","); q.breakable()
q.text("byte_count = ");
q.text("%#x" % self.byte_count)
q.breakable()
q.text('}')
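# Round-trip sketch for a fixed-size entry (commented out; assumes the
# OFReader helper from loxi.generic_util that drives this module's
# unpack() methods):
#
#   bc = bucket_counter(packet_count=10, byte_count=640)
#   buf = bc.pack()                       # two big-endian uint64s, 16 bytes
#   assert bucket_counter.unpack(loxi.generic_util.OFReader(buf)) == bc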
class bundle_features_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = bundle_features_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = bundle_features_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("bundle_features_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class time(loxi.OFObject):
def __init__(self, seconds=None, nanoseconds=None):
if seconds != None:
self.seconds = seconds
else:
self.seconds = 0
if nanoseconds != None:
self.nanoseconds = nanoseconds
else:
self.nanoseconds = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.seconds))
packed.append(struct.pack("!L", self.nanoseconds))
packed.append(b'\x00' * 4)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = time()
obj.seconds = reader.read("!Q")[0]
obj.nanoseconds = reader.read("!L")[0]
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.seconds != other.seconds: return False
if self.nanoseconds != other.nanoseconds: return False
return True
def pretty_print(self, q):
q.text("time {")
with q.group():
with q.indent(2):
q.breakable()
q.text("seconds = ");
q.text("%#x" % self.seconds)
q.text(","); q.breakable()
q.text("nanoseconds = ");
q.text("%#x" % self.nanoseconds)
q.breakable()
q.text('}')
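# Note: bundle_features_prop_time below leans on loxi.unimplemented() for
# its of_time_t fields; in pyloxi that call raises rather than returning a
# value, so default construction, pack() and unpack() of that class fail
# until the generator supports of_time_t.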
class bundle_features_prop_time(bundle_features_prop):
type = 1
def __init__(self, sched_accuracy=None, sched_max_future=None, sched_max_past=None, timestamp=None):
if sched_accuracy != None:
self.sched_accuracy = sched_accuracy
else:
self.sched_accuracy = loxi.unimplemented('init of_time_t')
if sched_max_future != None:
self.sched_max_future = sched_max_future
else:
self.sched_max_future = loxi.unimplemented('init of_time_t')
if sched_max_past != None:
self.sched_max_past = sched_max_past
else:
self.sched_max_past = loxi.unimplemented('init of_time_t')
if timestamp != None:
self.timestamp = timestamp
else:
self.timestamp = loxi.unimplemented('init of_time_t')
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(b'\x00' * 4)
packed.append(loxi.unimplemented('pack of_time_t'))
packed.append(loxi.unimplemented('pack of_time_t'))
packed.append(loxi.unimplemented('pack of_time_t'))
packed.append(loxi.unimplemented('pack of_time_t'))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = bundle_features_prop_time()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
reader.skip(4)
obj.sched_accuracy = loxi.unimplemented('unpack of_time_t')
obj.sched_max_future = loxi.unimplemented('unpack of_time_t')
obj.sched_max_past = loxi.unimplemented('unpack of_time_t')
obj.timestamp = loxi.unimplemented('unpack of_time_t')
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.sched_accuracy != other.sched_accuracy: return False
if self.sched_max_future != other.sched_max_future: return False
if self.sched_max_past != other.sched_max_past: return False
if self.timestamp != other.timestamp: return False
return True
def pretty_print(self, q):
q.text("bundle_features_prop_time {")
with q.group():
with q.indent(2):
q.breakable()
q.text("sched_accuracy = ");
q.pp(self.sched_accuracy)
q.text(","); q.breakable()
q.text("sched_max_future = ");
q.pp(self.sched_max_future)
q.text(","); q.breakable()
q.text("sched_max_past = ");
q.pp(self.sched_max_past)
q.text(","); q.breakable()
q.text("timestamp = ");
q.pp(self.timestamp)
q.breakable()
q.text('}')
bundle_features_prop.subtypes[1] = bundle_features_prop_time
class controller_status_entry(loxi.OFObject):
def __init__(self, short_id=None, role=None, reason=None, channel_status=None, properties=None):
if short_id != None:
self.short_id = short_id
else:
self.short_id = 0
if role != None:
self.role = role
else:
self.role = 0
if reason != None:
self.reason = reason
else:
self.reason = 0
if channel_status != None:
self.channel_status = channel_status
else:
self.channel_status = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", self.short_id))
packed.append(struct.pack("!L", self.role))
packed.append(struct.pack("!B", self.reason))
packed.append(struct.pack("!B", self.channel_status))
packed.append(b'\x00' * 6)
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = controller_status_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.short_id = reader.read("!H")[0]
obj.role = reader.read("!L")[0]
obj.reason = reader.read("!B")[0]
obj.channel_status = reader.read("!B")[0]
reader.skip(6)
obj.properties = loxi.generic_util.unpack_list(reader, ofp.common.controller_status_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.short_id != other.short_id: return False
if self.role != other.role: return False
if self.reason != other.reason: return False
if self.channel_status != other.channel_status: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("controller_status_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("short_id = ");
q.text("%#x" % self.short_id)
q.text(","); q.breakable()
q.text("role = ");
value_name_map = {0: 'OFPCR_ROLE_NOCHANGE', 1: 'OFPCR_ROLE_EQUAL', 2: 'OFPCR_ROLE_MASTER', 3: 'OFPCR_ROLE_SLAVE'}
if self.role in value_name_map:
q.text("%s(%d)" % (value_name_map[self.role], self.role))
else:
q.text("%#x" % self.role)
q.text(","); q.breakable()
q.text("reason = ");
value_name_map = {0: 'OFPCSR_REQUEST', 1: 'OFPCSR_CHANNEL_STATUS', 2: 'OFPCSR_ROLE', 3: 'OFPCSR_CONTROLLER_ADDED', 4: 'OFPCSR_CONTROLLER_REMOVED', 5: 'OFPCSR_SHORT_ID', 6: 'OFPCSR_EXPERIMENTER'}
if self.reason in value_name_map:
q.text("%s(%d)" % (value_name_map[self.reason], self.reason))
else:
q.text("%#x" % self.reason)
q.text(","); q.breakable()
q.text("channel_status = ");
value_name_map = {0: 'OFPCT_STATUS_UP', 1: 'OFPCT_STATUS_DOWN'}
if self.channel_status in value_name_map:
q.text("%s(%d)" % (value_name_map[self.channel_status], self.channel_status))
else:
q.text("%#x" % self.channel_status)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
class controller_status_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = controller_status_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = controller_status_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("controller_status_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class flow_lightweight_stats_entry(loxi.OFObject):
def __init__(self, table_id=None, reason=None, priority=None, match=None, stats=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if reason != None:
self.reason = reason
else:
self.reason = 0
if priority != None:
self.priority = priority
else:
self.priority = 0
if match != None:
self.match = match
else:
self.match = ofp.match()
if stats != None:
self.stats = stats
else:
self.stats = ofp.stat()
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 2)
packed.append(struct.pack("!B", self.table_id))
packed.append(struct.pack("!B", self.reason))
packed.append(struct.pack("!H", self.priority))
packed.append(self.match.pack())
packed.append(self.stats.pack())
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = flow_lightweight_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(2)
obj.table_id = reader.read("!B")[0]
obj.reason = reader.read("!B")[0]
obj.priority = reader.read("!H")[0]
obj.match = ofp.match.unpack(reader)
obj.stats = ofp.stat.unpack(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.reason != other.reason: return False
if self.priority != other.priority: return False
if self.match != other.match: return False
if self.stats != other.stats: return False
return True
def pretty_print(self, q):
q.text("flow_lightweight_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("reason = ");
value_name_map = {0: 'OFPFSR_STATS_REQUEST', 1: 'OFPFSR_STAT_TRIGGER'}
if self.reason in value_name_map:
q.text("%s(%d)" % (value_name_map[self.reason], self.reason))
else:
q.text("%#x" % self.reason)
q.text(","); q.breakable()
q.text("priority = ");
q.text("%#x" % self.priority)
q.text(","); q.breakable()
q.text("match = ");
q.pp(self.match)
q.text(","); q.breakable()
q.text("stats = ");
q.pp(self.stats)
q.breakable()
q.text('}')
class flow_monitor_entry(loxi.OFObject):
def __init__(self, monitor_id=None, out_port=None, out_group=None, flags=None, table_id=None, command=None, match=None):
if monitor_id != None:
self.monitor_id = monitor_id
else:
self.monitor_id = 0
if out_port != None:
self.out_port = out_port
else:
self.out_port = 0
if out_group != None:
self.out_group = out_group
else:
self.out_group = 0
if flags != None:
self.flags = flags
else:
self.flags = 0
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if command != None:
self.command = command
else:
self.command = 0
if match != None:
self.match = match
else:
self.match = ofp.match()
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.monitor_id))
packed.append(struct.pack("!L", self.out_port))
packed.append(struct.pack("!L", self.out_group))
packed.append(struct.pack("!H", self.flags))
packed.append(struct.pack("!B", self.table_id))
packed.append(struct.pack("!B", self.command))
packed.append(self.match.pack())
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = flow_monitor_entry()
obj.monitor_id = reader.read("!L")[0]
obj.out_port = reader.read("!L")[0]
obj.out_group = reader.read("!L")[0]
obj.flags = reader.read("!H")[0]
obj.table_id = reader.read("!B")[0]
obj.command = reader.read("!B")[0]
obj.match = ofp.match.unpack(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.monitor_id != other.monitor_id: return False
if self.out_port != other.out_port: return False
if self.out_group != other.out_group: return False
if self.flags != other.flags: return False
if self.table_id != other.table_id: return False
if self.command != other.command: return False
if self.match != other.match: return False
return True
def pretty_print(self, q):
q.text("flow_monitor_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("monitor_id = ");
q.text("%#x" % self.monitor_id)
q.text(","); q.breakable()
q.text("out_port = ");
q.text("%#x" % self.out_port)
q.text(","); q.breakable()
q.text("out_group = ");
q.text("%#x" % self.out_group)
q.text(","); q.breakable()
q.text("flags = ");
value_name_map = {1: 'OFPFMF_INITIAL', 2: 'OFPFMF_ADD', 4: 'OFPFMF_REMOVED', 8: 'OFPFMF_MODIFY', 16: 'OFPFMF_INSTRUCTIONS', 32: 'OFPFMF_NO_ABBREV', 64: 'OFPFMF_ONLY_OWN'}
q.text(util.pretty_flags(self.flags, value_name_map.values()))
q.text(","); q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("command = ");
value_name_map = {0: 'OFPFMC_ADD', 1: 'OFPFMC_MODIFY', 2: 'OFPFMC_DELETE'}
if self.command in value_name_map:
q.text("%s(%d)" % (value_name_map[self.command], self.command))
else:
q.text("%#x" % self.command)
q.text(","); q.breakable()
q.text("match = ");
q.pp(self.match)
q.breakable()
q.text('}')
class flow_monitor_reply_entry(loxi.OFObject):
def __init__(self, event=None):
if event != None:
self.event = event
else:
self.event = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", self.event))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = flow_monitor_reply_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.event = reader.read("!H")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.event != other.event: return False
return True
def pretty_print(self, q):
q.text("flow_monitor_reply_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("event = ");
value_name_map = {0: 'OFPFME_INITIAL', 1: 'OFPFME_ADDED', 2: 'OFPFME_REMOVED', 3: 'OFPFME_MODIFIED', 4: 'OFPFME_ABBREV', 5: 'OFPFME_PAUSED', 6: 'OFPFME_RESUMED'}
if self.event in value_name_map:
q.text("%s(%d)" % (value_name_map[self.event], self.event))
else:
q.text("%#x" % self.event)
q.breakable()
q.text('}')
class flow_stats_entry(loxi.OFObject):
def __init__(self, table_id=None, priority=None, idle_timeout=None, hard_timeout=None, flags=None, importance=None, cookie=None, match=None, stats=None, instructions=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if priority != None:
self.priority = priority
else:
self.priority = 0
if idle_timeout != None:
self.idle_timeout = idle_timeout
else:
self.idle_timeout = 0
if hard_timeout != None:
self.hard_timeout = hard_timeout
else:
self.hard_timeout = 0
if flags != None:
self.flags = flags
else:
self.flags = 0
if importance != None:
self.importance = importance
else:
self.importance = 0
if cookie != None:
self.cookie = cookie
else:
self.cookie = 0
if match != None:
self.match = match
else:
self.match = ofp.match()
if stats != None:
self.stats = stats
else:
self.stats = ofp.stat()
if instructions != None:
self.instructions = instructions
else:
self.instructions = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 2)
packed.append(struct.pack("!B", self.table_id))
packed.append(b'\x00' * 1)
packed.append(struct.pack("!H", self.priority))
packed.append(struct.pack("!H", self.idle_timeout))
packed.append(struct.pack("!H", self.hard_timeout))
packed.append(struct.pack("!H", self.flags))
packed.append(struct.pack("!H", self.importance))
packed.append(struct.pack("!Q", self.cookie))
packed.append(self.match.pack())
packed.append(self.stats.pack())
packed.append(loxi.generic_util.pack_list(self.instructions))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = flow_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(2)
obj.table_id = reader.read("!B")[0]
reader.skip(1)
obj.priority = reader.read("!H")[0]
obj.idle_timeout = reader.read("!H")[0]
obj.hard_timeout = reader.read("!H")[0]
obj.flags = reader.read("!H")[0]
obj.importance = reader.read("!H")[0]
obj.cookie = reader.read("!Q")[0]
obj.match = ofp.match.unpack(reader)
obj.stats = ofp.stat.unpack(reader)
obj.instructions = loxi.generic_util.unpack_list(reader, ofp.instruction.instruction.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.priority != other.priority: return False
if self.idle_timeout != other.idle_timeout: return False
if self.hard_timeout != other.hard_timeout: return False
if self.flags != other.flags: return False
if self.importance != other.importance: return False
if self.cookie != other.cookie: return False
if self.match != other.match: return False
if self.stats != other.stats: return False
if self.instructions != other.instructions: return False
return True
def pretty_print(self, q):
q.text("flow_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("priority = ");
q.text("%#x" % self.priority)
q.text(","); q.breakable()
q.text("idle_timeout = ");
q.text("%#x" % self.idle_timeout)
q.text(","); q.breakable()
q.text("hard_timeout = ");
q.text("%#x" % self.hard_timeout)
q.text(","); q.breakable()
q.text("flags = ");
value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_RESET_COUNTS', 8: 'OFPFF_NO_PKT_COUNTS', 16: 'OFPFF_NO_BYT_COUNTS', 128: 'OFPFF_BSN_SEND_IDLE'}
q.text(util.pretty_flags(self.flags, value_name_map.values()))
q.text(","); q.breakable()
q.text("importance = ");
q.text("%#x" % self.importance)
q.text(","); q.breakable()
q.text("cookie = ");
q.text("%#x" % self.cookie)
q.text(","); q.breakable()
q.text("match = ");
q.pp(self.match)
q.text(","); q.breakable()
q.text("stats = ");
q.pp(self.stats)
q.text(","); q.breakable()
q.text("instructions = ");
q.pp(self.instructions)
q.breakable()
q.text('}')
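# Construction sketch for the flow_stats_entry above (commented out;
# the values are hypothetical):
#
#   entry = flow_stats_entry(table_id=1, priority=100, cookie=0x1234,
#                            match=ofp.match(), instructions=[])
#   buf = entry.pack()    # length at offset 0 is back-patched, as above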
class group_bucket_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = group_bucket_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = group_bucket_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("group_bucket_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class group_bucket_prop_experimenter(group_bucket_prop):
subtypes = {}
type = 3
def __init__(self, experimenter=None, exp_type=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if exp_type != None:
self.exp_type = exp_type
else:
self.exp_type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.exp_type))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = group_bucket_prop_experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = group_bucket_prop_experimenter()
_type = reader.read("!H")[0]
assert(_type == 3)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.exp_type = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.exp_type != other.exp_type: return False
return True
def pretty_print(self, q):
q.text("group_bucket_prop_experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("exp_type = ");
q.text("%#x" % self.exp_type)
q.breakable()
q.text('}')
group_bucket_prop.subtypes[3] = group_bucket_prop_experimenter
class group_bucket_prop_watch_group(group_bucket_prop):
type = 2
def __init__(self, watch=None):
if watch != None:
self.watch = watch
else:
self.watch = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.watch))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = group_bucket_prop_watch_group()
_type = reader.read("!H")[0]
assert(_type == 2)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.watch = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.watch != other.watch: return False
return True
def pretty_print(self, q):
q.text("group_bucket_prop_watch_group {")
with q.group():
with q.indent(2):
q.breakable()
q.text("watch = ");
q.text("%#x" % self.watch)
q.breakable()
q.text('}')
group_bucket_prop.subtypes[2] = group_bucket_prop_watch_group
class group_bucket_prop_watch_port(group_bucket_prop):
type = 1
def __init__(self, watch=None):
if watch != None:
self.watch = watch
else:
self.watch = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.watch))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = group_bucket_prop_watch_port()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.watch = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.watch != other.watch: return False
return True
def pretty_print(self, q):
q.text("group_bucket_prop_watch_port {")
with q.group():
with q.indent(2):
q.breakable()
q.text("watch = ");
q.text("%#x" % self.watch)
q.breakable()
q.text('}')
group_bucket_prop.subtypes[1] = group_bucket_prop_watch_port
class group_bucket_prop_weight(group_bucket_prop):
type = 0
def __init__(self, weight=None):
if weight != None:
self.weight = weight
else:
self.weight = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!H", self.weight))
packed.append(b'\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = group_bucket_prop_weight()
_type = reader.read("!H")[0]
assert(_type == 0)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.weight = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.weight != other.weight: return False
return True
def pretty_print(self, q):
q.text("group_bucket_prop_weight {")
with q.group():
with q.indent(2):
q.breakable()
q.text("weight = ");
q.text("%#x" % self.weight)
q.breakable()
q.text('}')
group_bucket_prop.subtypes[0] = group_bucket_prop_weight
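# All four group bucket property subtypes defined in this module are now
# registered: weight (type 0), watch_port (1), watch_group (2) and
# experimenter (3).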
class group_desc_stats_entry(loxi.OFObject):
def __init__(self, group_type=None, group_id=None, buckets=None, properties=None):
if group_type != None:
self.group_type = group_type
else:
self.group_type = 0
if group_id != None:
self.group_id = group_id
else:
self.group_id = 0
if buckets != None:
self.buckets = buckets
else:
self.buckets = []
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!B", self.group_type))
packed.append(b'\x00' * 1)
packed.append(struct.pack("!L", self.group_id))
packed.append(struct.pack("!H", 0)) # placeholder for bucket_array_len at index 4
packed.append(b'\x00' * 6)
packed.append(loxi.generic_util.pack_list(self.buckets))
packed[4] = struct.pack("!H", len(packed[-1]))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = group_desc_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.group_type = reader.read("!B")[0]
reader.skip(1)
obj.group_id = reader.read("!L")[0]
_bucket_array_len = reader.read("!H")[0]
reader.skip(6)
obj.buckets = loxi.generic_util.unpack_list(reader.slice(_bucket_array_len), ofp.common.bucket.unpack)
obj.properties = loxi.generic_util.unpack_list(reader, ofp.common.group_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.group_type != other.group_type: return False
if self.group_id != other.group_id: return False
if self.buckets != other.buckets: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("group_desc_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("group_type = ");
value_name_map = {0: 'OFPGT_ALL', 1: 'OFPGT_SELECT', 2: 'OFPGT_INDIRECT', 3: 'OFPGT_FF'}
if self.group_type in value_name_map:
q.text("%s(%d)" % (value_name_map[self.group_type], self.group_type))
else:
q.text("%#x" % self.group_type)
q.text(","); q.breakable()
q.text("group_id = ");
q.text("%#x" % self.group_id)
q.text(","); q.breakable()
q.text("buckets = ");
q.pp(self.buckets)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
class group_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = group_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = group_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("group_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class group_prop_experimenter(group_prop):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, exp_type=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if exp_type != None:
self.exp_type = exp_type
else:
self.exp_type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.exp_type))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = group_prop_experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = group_prop_experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.exp_type = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.exp_type != other.exp_type: return False
return True
def pretty_print(self, q):
q.text("group_prop_experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("exp_type = ");
q.text("%#x" % self.exp_type)
q.breakable()
q.text('}')
group_prop.subtypes[65535] = group_prop_experimenter
class group_stats_entry(loxi.OFObject):
def __init__(self, group_id=None, ref_count=None, packet_count=None, byte_count=None, duration_sec=None, duration_nsec=None, bucket_stats=None):
if group_id != None:
self.group_id = group_id
else:
self.group_id = 0
if ref_count != None:
self.ref_count = ref_count
else:
self.ref_count = 0
if packet_count != None:
self.packet_count = packet_count
else:
self.packet_count = 0
if byte_count != None:
self.byte_count = byte_count
else:
self.byte_count = 0
if duration_sec != None:
self.duration_sec = duration_sec
else:
self.duration_sec = 0
if duration_nsec != None:
self.duration_nsec = duration_nsec
else:
self.duration_nsec = 0
if bucket_stats != None:
self.bucket_stats = bucket_stats
else:
self.bucket_stats = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 2)
packed.append(struct.pack("!L", self.group_id))
packed.append(struct.pack("!L", self.ref_count))
packed.append(b'\x00' * 4)
packed.append(struct.pack("!Q", self.packet_count))
packed.append(struct.pack("!Q", self.byte_count))
packed.append(struct.pack("!L", self.duration_sec))
packed.append(struct.pack("!L", self.duration_nsec))
packed.append(loxi.generic_util.pack_list(self.bucket_stats))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = group_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(2)
obj.group_id = reader.read("!L")[0]
obj.ref_count = reader.read("!L")[0]
reader.skip(4)
obj.packet_count = reader.read("!Q")[0]
obj.byte_count = reader.read("!Q")[0]
obj.duration_sec = reader.read("!L")[0]
obj.duration_nsec = reader.read("!L")[0]
obj.bucket_stats = loxi.generic_util.unpack_list(reader, ofp.common.bucket_counter.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.group_id != other.group_id: return False
if self.ref_count != other.ref_count: return False
if self.packet_count != other.packet_count: return False
if self.byte_count != other.byte_count: return False
if self.duration_sec != other.duration_sec: return False
if self.duration_nsec != other.duration_nsec: return False
if self.bucket_stats != other.bucket_stats: return False
return True
def pretty_print(self, q):
q.text("group_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("group_id = ");
q.text("%#x" % self.group_id)
q.text(","); q.breakable()
q.text("ref_count = ");
q.text("%#x" % self.ref_count)
q.text(","); q.breakable()
q.text("packet_count = ");
q.text("%#x" % self.packet_count)
q.text(","); q.breakable()
q.text("byte_count = ");
q.text("%#x" % self.byte_count)
q.text(","); q.breakable()
q.text("duration_sec = ");
q.text("%#x" % self.duration_sec)
q.text(","); q.breakable()
q.text("duration_nsec = ");
q.text("%#x" % self.duration_nsec)
q.text(","); q.breakable()
q.text("bucket_stats = ");
q.pp(self.bucket_stats)
q.breakable()
q.text('}')
class header_type(loxi.OFObject):
subtypes = {}
def __init__(self, namespace=None, ns_type=None):
if namespace != None:
self.namespace = namespace
else:
self.namespace = 0
if ns_type != None:
self.ns_type = ns_type
else:
self.ns_type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.namespace))
packed.append(struct.pack("!H", self.ns_type))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = header_type.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = header_type()
obj.namespace = reader.read("!H")[0]
obj.ns_type = reader.read("!H")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.namespace != other.namespace: return False
if self.ns_type != other.ns_type: return False
return True
def pretty_print(self, q):
q.text("header_type {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ns_type = ");
q.text("%#x" % self.ns_type)
q.breakable()
q.text('}')
class hello_elem(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = hello_elem.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = hello_elem()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("hello_elem {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class hello_elem_versionbitmap(hello_elem):
type = 1
def __init__(self, bitmaps=None):
if bitmaps != None:
self.bitmaps = bitmaps
else:
self.bitmaps = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.bitmaps))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = hello_elem_versionbitmap()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.bitmaps = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.bitmaps != other.bitmaps: return False
return True
def pretty_print(self, q):
q.text("hello_elem_versionbitmap {")
with q.group():
with q.indent(2):
q.breakable()
q.text("bitmaps = ");
q.pp(self.bitmaps)
q.breakable()
q.text('}')
hello_elem.subtypes[1] = hello_elem_versionbitmap
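# Each uint32 in bitmaps is a block of 32 version bits; per the OpenFlow
# hello element definition, a set bit at position n advertises support for
# wire version n (e.g. bit 4 for OpenFlow 1.3, wire version 0x04).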
class match_v3(loxi.OFObject):
type = 1
def __init__(self, oxm_list=None):
if oxm_list != None:
self.oxm_list = oxm_list
else:
self.oxm_list = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_list))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
packed.append(loxi.generic_util.pad_to(8, length))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = match_v3()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_list = loxi.generic_util.unpack_list(reader, ofp.oxm.oxm.unpack)
orig_reader.skip_align()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_list != other.oxm_list: return False
return True
def pretty_print(self, q):
q.text("match_v3 {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_list = ");
q.pp(self.oxm_list)
q.breakable()
q.text('}')
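# match_v3 computes its length before appending padding, so the length
# field excludes the pad bytes even though the packed buffer is padded to
# a multiple of 8; unpack() restores alignment via orig_reader.skip_align().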
class meter_band_stats(loxi.OFObject):
def __init__(self, packet_band_count=None, byte_band_count=None):
if packet_band_count != None:
self.packet_band_count = packet_band_count
else:
self.packet_band_count = 0
if byte_band_count != None:
self.byte_band_count = byte_band_count
else:
self.byte_band_count = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.packet_band_count))
packed.append(struct.pack("!Q", self.byte_band_count))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = meter_band_stats()
obj.packet_band_count = reader.read("!Q")[0]
obj.byte_band_count = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.packet_band_count != other.packet_band_count: return False
if self.byte_band_count != other.byte_band_count: return False
return True
def pretty_print(self, q):
q.text("meter_band_stats {")
with q.group():
with q.indent(2):
q.breakable()
q.text("packet_band_count = ");
q.text("%#x" % self.packet_band_count)
q.text(","); q.breakable()
q.text("byte_band_count = ");
q.text("%#x" % self.byte_band_count)
q.breakable()
q.text('}')
class meter_config(loxi.OFObject):
def __init__(self, flags=None, meter_id=None, entries=None):
if flags != None:
self.flags = flags
else:
self.flags = 0
if meter_id != None:
self.meter_id = meter_id
else:
self.meter_id = 0
if entries != None:
self.entries = entries
else:
self.entries = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!H", self.flags))
packed.append(struct.pack("!L", self.meter_id))
packed.append(loxi.generic_util.pack_list(self.entries))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = meter_config()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.flags = reader.read("!H")[0]
obj.meter_id = reader.read("!L")[0]
obj.entries = loxi.generic_util.unpack_list(reader, ofp.meter_band.meter_band.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.flags != other.flags: return False
if self.meter_id != other.meter_id: return False
if self.entries != other.entries: return False
return True
def pretty_print(self, q):
q.text("meter_config {")
with q.group():
with q.indent(2):
q.breakable()
q.text("flags = ");
value_name_map = {1: 'OFPMF_KBPS', 2: 'OFPMF_PKTPS', 4: 'OFPMF_BURST', 8: 'OFPMF_STATS'}
q.text(util.pretty_flags(self.flags, value_name_map.values()))
q.text(","); q.breakable()
q.text("meter_id = ");
q.text("%#x" % self.meter_id)
q.text(","); q.breakable()
q.text("entries = ");
q.pp(self.entries)
q.breakable()
q.text('}')
class meter_features(loxi.OFObject):
def __init__(self, max_meter=None, band_types=None, capabilities=None, max_bands=None, max_color=None, features=None):
if max_meter != None:
self.max_meter = max_meter
else:
self.max_meter = 0
if band_types != None:
self.band_types = band_types
else:
self.band_types = 0
if capabilities != None:
self.capabilities = capabilities
else:
self.capabilities = 0
if max_bands != None:
self.max_bands = max_bands
else:
self.max_bands = 0
if max_color != None:
self.max_color = max_color
else:
self.max_color = 0
if features != None:
self.features = features
else:
self.features = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.max_meter))
packed.append(struct.pack("!L", self.band_types))
packed.append(struct.pack("!L", self.capabilities))
packed.append(struct.pack("!B", self.max_bands))
packed.append(struct.pack("!B", self.max_color))
packed.append(b'\x00' * 2)
packed.append(struct.pack("!L", self.features))
packed.append(b'\x00' * 4)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = meter_features()
obj.max_meter = reader.read("!L")[0]
obj.band_types = reader.read("!L")[0]
obj.capabilities = reader.read("!L")[0]
obj.max_bands = reader.read("!B")[0]
obj.max_color = reader.read("!B")[0]
reader.skip(2)
obj.features = reader.read("!L")[0]
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.max_meter != other.max_meter: return False
if self.band_types != other.band_types: return False
if self.capabilities != other.capabilities: return False
if self.max_bands != other.max_bands: return False
if self.max_color != other.max_color: return False
if self.features != other.features: return False
return True
def pretty_print(self, q):
q.text("meter_features {")
with q.group():
with q.indent(2):
q.breakable()
q.text("max_meter = ");
q.text("%#x" % self.max_meter)
q.text(","); q.breakable()
q.text("band_types = ");
q.text("%#x" % self.band_types)
q.text(","); q.breakable()
q.text("capabilities = ");
q.text("%#x" % self.capabilities)
q.text(","); q.breakable()
q.text("max_bands = ");
q.text("%#x" % self.max_bands)
q.text(","); q.breakable()
q.text("max_color = ");
q.text("%#x" % self.max_color)
q.text(","); q.breakable()
q.text("features = ");
q.text("%#x" % self.features)
q.breakable()
q.text('}')
class meter_stats(loxi.OFObject):
def __init__(self, meter_id=None, ref_count=None, packet_in_count=None, byte_in_count=None, duration_sec=None, duration_nsec=None, band_stats=None):
if meter_id != None:
self.meter_id = meter_id
else:
self.meter_id = 0
if ref_count != None:
self.ref_count = ref_count
else:
self.ref_count = 0
if packet_in_count != None:
self.packet_in_count = packet_in_count
else:
self.packet_in_count = 0
if byte_in_count != None:
self.byte_in_count = byte_in_count
else:
self.byte_in_count = 0
if duration_sec != None:
self.duration_sec = duration_sec
else:
self.duration_sec = 0
if duration_nsec != None:
self.duration_nsec = duration_nsec
else:
self.duration_nsec = 0
if band_stats != None:
self.band_stats = band_stats
else:
self.band_stats = []
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.meter_id))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(b'\x00' * 6)
packed.append(struct.pack("!L", self.ref_count))
packed.append(struct.pack("!Q", self.packet_in_count))
packed.append(struct.pack("!Q", self.byte_in_count))
packed.append(struct.pack("!L", self.duration_sec))
packed.append(struct.pack("!L", self.duration_nsec))
packed.append(loxi.generic_util.pack_list(self.band_stats))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = meter_stats()
obj.meter_id = reader.read("!L")[0]
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 6)
reader.skip(6)
obj.ref_count = reader.read("!L")[0]
obj.packet_in_count = reader.read("!Q")[0]
obj.byte_in_count = reader.read("!Q")[0]
obj.duration_sec = reader.read("!L")[0]
obj.duration_nsec = reader.read("!L")[0]
obj.band_stats = loxi.generic_util.unpack_list(reader, ofp.common.meter_band_stats.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.meter_id != other.meter_id: return False
if self.ref_count != other.ref_count: return False
if self.packet_in_count != other.packet_in_count: return False
if self.byte_in_count != other.byte_in_count: return False
if self.duration_sec != other.duration_sec: return False
if self.duration_nsec != other.duration_nsec: return False
if self.band_stats != other.band_stats: return False
return True
def pretty_print(self, q):
q.text("meter_stats {")
with q.group():
with q.indent(2):
q.breakable()
q.text("meter_id = ");
q.text("%#x" % self.meter_id)
q.text(","); q.breakable()
q.text("ref_count = ");
q.text("%#x" % self.ref_count)
q.text(","); q.breakable()
q.text("packet_in_count = ");
q.text("%#x" % self.packet_in_count)
q.text(","); q.breakable()
q.text("byte_in_count = ");
q.text("%#x" % self.byte_in_count)
q.text(","); q.breakable()
q.text("duration_sec = ");
q.text("%#x" % self.duration_sec)
q.text(","); q.breakable()
q.text("duration_nsec = ");
q.text("%#x" % self.duration_nsec)
q.text(","); q.breakable()
q.text("band_stats = ");
q.pp(self.band_stats)
q.breakable()
q.text('}')
class oxs(loxi.OFObject):
subtypes = {}
def __init__(self, type_len=None):
if type_len != None:
self.type_len = type_len
else:
self.type_len = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.type_len))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 0)
subclass = oxs.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = oxs()
obj.type_len = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type_len != other.type_len: return False
return True
def pretty_print(self, q):
q.text("oxs {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
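# The oxs type_len constants below appear to follow the OXS header layout
# of OpenFlow 1.5, which mirrors OXM: oxs_class:16 | oxs_field:7 |
# reserved:1 | length:8. For example, oxs_byte_count's 2147616776 is
# 0x80020808: class 0x8002 (OPENFLOW_BASIC), field 4, 8-byte payload.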
class oxs_byte_count(oxs):
type_len = 2147616776
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.type_len))
packed.append(struct.pack("!Q", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = oxs_byte_count()
_type_len = reader.read("!L")[0]
assert(_type_len == 2147616776)
obj.value = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("oxs_byte_count {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
oxs.subtypes[2147616776] = oxs_byte_count
class oxs_duration(oxs):
type_len = 2147614728
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.type_len))
packed.append(struct.pack("!Q", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = oxs_duration()
_type_len = reader.read("!L")[0]
assert(_type_len == 2147614728)
obj.value = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("oxs_duration {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
oxs.subtypes[2147614728] = oxs_duration
class oxs_flow_count(oxs):
type_len = 2147615748
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.type_len))
packed.append(struct.pack("!L", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = oxs_flow_count()
_type_len = reader.read("!L")[0]
assert(_type_len == 2147615748)
obj.value = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("oxs_flow_count {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
oxs.subtypes[2147615748] = oxs_flow_count
class oxs_idle_time(oxs):
type_len = 2147615240
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.type_len))
packed.append(struct.pack("!Q", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = oxs_idle_time()
_type_len = reader.read("!L")[0]
assert(_type_len == 2147615240)
obj.value = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("oxs_idle_time {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
oxs.subtypes[2147615240] = oxs_idle_time
class oxs_packet_count(oxs):
type_len = 2147616264
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.type_len))
packed.append(struct.pack("!Q", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = oxs_packet_count()
_type_len = reader.read("!L")[0]
assert(_type_len == 2147616264)
obj.value = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("oxs_packet_count {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
oxs.subtypes[2147616264] = oxs_packet_count
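# Round-trip sketch for the OXS fields above (hedged: assumes
# loxi.generic_util.OFReader(buf) is the buffer reader these unpack
# methods expect, as elsewhere in pyloxi):
#   buf = oxs_packet_count(value=42).pack()
#   obj = oxs.unpack(loxi.generic_util.OFReader(buf))
#   assert isinstance(obj, oxs_packet_count) and obj.value == 42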
class packet_queue(loxi.OFObject):
def __init__(self, queue_id=None, port=None, properties=None):
if queue_id != None:
self.queue_id = queue_id
else:
self.queue_id = 0
if port != None:
self.port = port
else:
self.port = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.queue_id))
packed.append(util.pack_port_no(self.port))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 2
packed.append(b'\x00' * 6)
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[2] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = packet_queue()
obj.queue_id = reader.read("!L")[0]
obj.port = util.unpack_port_no(reader)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 10)
reader.skip(6)
obj.properties = loxi.generic_util.unpack_list(reader, ofp.common.queue_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.queue_id != other.queue_id: return False
if self.port != other.port: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("packet_queue {")
with q.group():
with q.indent(2):
q.breakable()
q.text("queue_id = ");
q.text("%#x" % self.queue_id)
q.text(","); q.breakable()
q.text("port = ");
q.text(util.pretty_port(self.port))
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
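# packet_queue shows the length-placeholder pattern used throughout this
# module: pack() reserves a zeroed '!H' at a fixed index, sums the packed
# parts, then patches the real length in; unpack() mirrors it by slicing
# the reader to that advertised length (the second argument to slice()
# being the header bytes already consumed), so the trailing property list
# cannot read past the TLV boundary.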
class port_desc(loxi.OFObject):
def __init__(self, port_no=None, hw_addr=None, name=None, config=None, state=None, properties=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if hw_addr != None:
self.hw_addr = hw_addr
else:
self.hw_addr = [0,0,0,0,0,0]
if name != None:
self.name = name
else:
self.name = ""
if config != None:
self.config = config
else:
self.config = 0
if state != None:
self.state = state
else:
self.state = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(util.pack_port_no(self.port_no))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(b'\x00' * 2)
packed.append(struct.pack("!6B", *self.hw_addr))
packed.append(b'\x00' * 2)
packed.append(struct.pack("!16s", self.name.encode()))
packed.append(struct.pack("!L", self.config))
packed.append(struct.pack("!L", self.state))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = port_desc()
obj.port_no = util.unpack_port_no(reader)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 6)
reader.skip(2)
obj.hw_addr = list(reader.read('!6B'))
reader.skip(2)
obj.name = reader.read("!16s")[0].decode().rstrip("\x00")
obj.config = reader.read("!L")[0]
obj.state = reader.read("!L")[0]
obj.properties = loxi.generic_util.unpack_list(reader, ofp.port_desc_prop.port_desc_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.hw_addr != other.hw_addr: return False
if self.name != other.name: return False
if self.config != other.config: return False
if self.state != other.state: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("port_desc {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text(util.pretty_port(self.port_no))
q.text(","); q.breakable()
q.text("hw_addr = ");
q.text(util.pretty_mac(self.hw_addr))
q.text(","); q.breakable()
q.text("name = ");
q.pp(self.name)
q.text(","); q.breakable()
q.text("config = ");
value_name_map = {1: 'OFPPC_PORT_DOWN', 4: 'OFPPC_NO_RECV', 32: 'OFPPC_NO_FWD', 64: 'OFPPC_NO_PACKET_IN', 2147483648: 'OFPPC_BSN_MIRROR_DEST'}
q.text(util.pretty_flags(self.config, value_name_map.values()))
q.text(","); q.breakable()
q.text("state = ");
value_name_map = {1: 'OFPPS_LINK_DOWN', 2: 'OFPPS_BLOCKED', 4: 'OFPPS_LIVE'}
q.text(util.pretty_flags(self.state, value_name_map.values()))
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
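# port_desc packs hw_addr as six raw bytes ('!6B') and name as a
# NUL-padded 16-byte string; unpack strips the trailing NULs so a
# round-tripped name compares equal in __eq__.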
class port_desc_prop_egress(loxi.OFObject):
type = 3
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = b''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(self.oxm_ids)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = port_desc_prop_egress()
_type = reader.read("!H")[0]
assert(_type == 3)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = reader.read_all()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("port_desc_prop_egress {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
class port_desc_prop_ingress(loxi.OFObject):
type = 2
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = b''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(self.oxm_ids)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = port_desc_prop_ingress()
_type = reader.read("!H")[0]
assert(_type == 2)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = reader.read_all()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("port_desc_prop_ingress {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
class port_stats_entry(loxi.OFObject):
def __init__(self, port_no=None, duration_sec=None, duration_nsec=None, rx_packets=None, tx_packets=None, rx_bytes=None, tx_bytes=None, rx_dropped=None, tx_dropped=None, rx_errors=None, tx_errors=None, properties=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if duration_sec != None:
self.duration_sec = duration_sec
else:
self.duration_sec = 0
if duration_nsec != None:
self.duration_nsec = duration_nsec
else:
self.duration_nsec = 0
if rx_packets != None:
self.rx_packets = rx_packets
else:
self.rx_packets = 0
if tx_packets != None:
self.tx_packets = tx_packets
else:
self.tx_packets = 0
if rx_bytes != None:
self.rx_bytes = rx_bytes
else:
self.rx_bytes = 0
if tx_bytes != None:
self.tx_bytes = tx_bytes
else:
self.tx_bytes = 0
if rx_dropped != None:
self.rx_dropped = rx_dropped
else:
self.rx_dropped = 0
if tx_dropped != None:
self.tx_dropped = tx_dropped
else:
self.tx_dropped = 0
if rx_errors != None:
self.rx_errors = rx_errors
else:
self.rx_errors = 0
if tx_errors != None:
self.tx_errors = tx_errors
else:
self.tx_errors = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 2)
packed.append(util.pack_port_no(self.port_no))
packed.append(struct.pack("!L", self.duration_sec))
packed.append(struct.pack("!L", self.duration_nsec))
packed.append(struct.pack("!Q", self.rx_packets))
packed.append(struct.pack("!Q", self.tx_packets))
packed.append(struct.pack("!Q", self.rx_bytes))
packed.append(struct.pack("!Q", self.tx_bytes))
packed.append(struct.pack("!Q", self.rx_dropped))
packed.append(struct.pack("!Q", self.tx_dropped))
packed.append(struct.pack("!Q", self.rx_errors))
packed.append(struct.pack("!Q", self.tx_errors))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = port_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(2)
obj.port_no = util.unpack_port_no(reader)
obj.duration_sec = reader.read("!L")[0]
obj.duration_nsec = reader.read("!L")[0]
obj.rx_packets = reader.read("!Q")[0]
obj.tx_packets = reader.read("!Q")[0]
obj.rx_bytes = reader.read("!Q")[0]
obj.tx_bytes = reader.read("!Q")[0]
obj.rx_dropped = reader.read("!Q")[0]
obj.tx_dropped = reader.read("!Q")[0]
obj.rx_errors = reader.read("!Q")[0]
obj.tx_errors = reader.read("!Q")[0]
obj.properties = loxi.generic_util.unpack_list(reader, ofp.port_stats_prop.port_stats_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.duration_sec != other.duration_sec: return False
if self.duration_nsec != other.duration_nsec: return False
if self.rx_packets != other.rx_packets: return False
if self.tx_packets != other.tx_packets: return False
if self.rx_bytes != other.rx_bytes: return False
if self.tx_bytes != other.tx_bytes: return False
if self.rx_dropped != other.rx_dropped: return False
if self.tx_dropped != other.tx_dropped: return False
if self.rx_errors != other.rx_errors: return False
if self.tx_errors != other.tx_errors: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("port_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text(util.pretty_port(self.port_no))
q.text(","); q.breakable()
q.text("duration_sec = ");
q.text("%#x" % self.duration_sec)
q.text(","); q.breakable()
q.text("duration_nsec = ");
q.text("%#x" % self.duration_nsec)
q.text(","); q.breakable()
q.text("rx_packets = ");
q.text("%#x" % self.rx_packets)
q.text(","); q.breakable()
q.text("tx_packets = ");
q.text("%#x" % self.tx_packets)
q.text(","); q.breakable()
q.text("rx_bytes = ");
q.text("%#x" % self.rx_bytes)
q.text(","); q.breakable()
q.text("tx_bytes = ");
q.text("%#x" % self.tx_bytes)
q.text(","); q.breakable()
q.text("rx_dropped = ");
q.text("%#x" % self.rx_dropped)
q.text(","); q.breakable()
q.text("tx_dropped = ");
q.text("%#x" % self.tx_dropped)
q.text(","); q.breakable()
q.text("rx_errors = ");
q.text("%#x" % self.rx_errors)
q.text(","); q.breakable()
q.text("tx_errors = ");
q.text("%#x" % self.tx_errors)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
class queue_desc(loxi.OFObject):
def __init__(self, port_no=None, queue_id=None, properties=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if queue_id != None:
self.queue_id = queue_id
else:
self.queue_id = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.port_no))
packed.append(struct.pack("!L", self.queue_id))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 2
packed.append(b'\x00' * 6)
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[2] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = queue_desc()
obj.port_no = reader.read("!L")[0]
obj.queue_id = reader.read("!L")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 10)
reader.skip(6)
obj.properties = loxi.generic_util.unpack_list(reader, ofp.queue_desc_prop.queue_desc_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.queue_id != other.queue_id: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("queue_desc {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text("%#x" % self.port_no)
q.text(","); q.breakable()
q.text("queue_id = ");
q.text("%#x" % self.queue_id)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
class queue_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(b'\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = queue_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = queue_prop()
obj.type = reader.read("!H")[0]
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("queue_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
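# queue_prop.unpack peeks the 16-bit type without consuming it: known
# types dispatch to the class registered in queue_prop.subtypes, unknown
# ones fall back to the base class so unrecognized properties are decoded
# generically rather than raising.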
class queue_prop_experimenter(queue_prop):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, data=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if data != None:
self.data = data
else:
self.data = b''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(b'\x00' * 4)
packed.append(struct.pack("!L", self.experimenter))
packed.append(b'\x00' * 4)
packed.append(self.data)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 8)
subclass = queue_prop_experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = queue_prop_experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
obj.experimenter = reader.read("!L")[0]
reader.skip(4)
obj.data = reader.read_all()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.data != other.data: return False
return True
def pretty_print(self, q):
q.text("queue_prop_experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("data = ");
q.pp(self.data)
q.breakable()
q.text('}')
queue_prop.subtypes[65535] = queue_prop_experimenter
class queue_prop_max_rate(queue_prop):
type = 2
def __init__(self, rate=None):
if rate != None:
self.rate = rate
else:
self.rate = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(b'\x00' * 4)
packed.append(struct.pack("!H", self.rate))
packed.append(b'\x00' * 6)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = queue_prop_max_rate()
_type = reader.read("!H")[0]
assert(_type == 2)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
obj.rate = reader.read("!H")[0]
reader.skip(6)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.rate != other.rate: return False
return True
def pretty_print(self, q):
q.text("queue_prop_max_rate {")
with q.group():
with q.indent(2):
q.breakable()
q.text("rate = ");
q.text("%#x" % self.rate)
q.breakable()
q.text('}')
queue_prop.subtypes[2] = queue_prop_max_rate
class queue_prop_min_rate(queue_prop):
type = 1
def __init__(self, rate=None):
if rate != None:
self.rate = rate
else:
self.rate = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(b'\x00' * 4)
packed.append(struct.pack("!H", self.rate))
packed.append(b'\x00' * 6)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = queue_prop_min_rate()
_type = reader.read("!H")[0]
assert(_type == 1)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
obj.rate = reader.read("!H")[0]
reader.skip(6)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.rate != other.rate: return False
return True
def pretty_print(self, q):
q.text("queue_prop_min_rate {")
with q.group():
with q.indent(2):
q.breakable()
q.text("rate = ");
q.text("%#x" % self.rate)
q.breakable()
q.text('}')
queue_prop.subtypes[1] = queue_prop_min_rate
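# Dispatch sketch (hedged: OFReader construction assumed as above):
#   buf = queue_prop_min_rate(rate=100).pack()
#   prop = queue_prop.unpack(loxi.generic_util.OFReader(buf))
#   assert isinstance(prop, queue_prop_min_rate) and prop.rate == 100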
class queue_stats_entry(loxi.OFObject):
def __init__(self, port_no=None, queue_id=None, tx_bytes=None, tx_packets=None, tx_errors=None, duration_sec=None, duration_nsec=None, properties=None):
if port_no != None:
self.port_no = port_no
else:
self.port_no = 0
if queue_id != None:
self.queue_id = queue_id
else:
self.queue_id = 0
if tx_bytes != None:
self.tx_bytes = tx_bytes
else:
self.tx_bytes = 0
if tx_packets != None:
self.tx_packets = tx_packets
else:
self.tx_packets = 0
if tx_errors != None:
self.tx_errors = tx_errors
else:
self.tx_errors = 0
if duration_sec != None:
self.duration_sec = duration_sec
else:
self.duration_sec = 0
if duration_nsec != None:
self.duration_nsec = duration_nsec
else:
self.duration_nsec = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(b'\x00' * 6)
packed.append(util.pack_port_no(self.port_no))
packed.append(struct.pack("!L", self.queue_id))
packed.append(struct.pack("!Q", self.tx_bytes))
packed.append(struct.pack("!Q", self.tx_packets))
packed.append(struct.pack("!Q", self.tx_errors))
packed.append(struct.pack("!L", self.duration_sec))
packed.append(struct.pack("!L", self.duration_nsec))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = queue_stats_entry()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
reader.skip(6)
obj.port_no = util.unpack_port_no(reader)
obj.queue_id = reader.read("!L")[0]
obj.tx_bytes = reader.read("!Q")[0]
obj.tx_packets = reader.read("!Q")[0]
obj.tx_errors = reader.read("!Q")[0]
obj.duration_sec = reader.read("!L")[0]
obj.duration_nsec = reader.read("!L")[0]
obj.properties = loxi.generic_util.unpack_list(reader, ofp.queue_stats_prop.queue_stats_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_no != other.port_no: return False
if self.queue_id != other.queue_id: return False
if self.tx_bytes != other.tx_bytes: return False
if self.tx_packets != other.tx_packets: return False
if self.tx_errors != other.tx_errors: return False
if self.duration_sec != other.duration_sec: return False
if self.duration_nsec != other.duration_nsec: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("queue_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_no = ");
q.text(util.pretty_port(self.port_no))
q.text(","); q.breakable()
q.text("queue_id = ");
q.text("%#x" % self.queue_id)
q.text(","); q.breakable()
q.text("tx_bytes = ");
q.text("%#x" % self.tx_bytes)
q.text(","); q.breakable()
q.text("tx_packets = ");
q.text("%#x" % self.tx_packets)
q.text(","); q.breakable()
q.text("tx_errors = ");
q.text("%#x" % self.tx_errors)
q.text(","); q.breakable()
q.text("duration_sec = ");
q.text("%#x" % self.duration_sec)
q.text(","); q.breakable()
q.text("duration_nsec = ");
q.text("%#x" % self.duration_nsec)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
class stat_v6(loxi.OFObject):
def __init__(self, oxs_fields=None):
if oxs_fields != None:
self.oxs_fields = oxs_fields
else:
self.oxs_fields = []
return
def pack(self):
packed = []
packed.append(b'\x00' * 2)
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxs_fields))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
packed.append(loxi.generic_util.pad_to(8, length))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = stat_v6()
reader.skip(2)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxs_fields = loxi.generic_util.unpack_list(reader, ofp.common.oxs.unpack)
orig_reader.skip_align()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxs_fields != other.oxs_fields: return False
return True
def pretty_print(self, q):
q.text("stat_v6 {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxs_fields = ");
q.pp(self.oxs_fields)
q.breakable()
q.text('}')
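# stat_v6 is the container for the oxs fields defined earlier: pack()
# appends pad_to(8, length) so the serialized struct ends on an 8-byte
# boundary, and unpack() calls skip_align() on the outer reader to step
# over that same padding.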
class table_desc(loxi.OFObject):
def __init__(self, table_id=None, config=None, properties=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if config != None:
self.config = config
else:
self.config = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!B", self.table_id))
packed.append(b'\x00' * 1)
packed.append(struct.pack("!L", self.config))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_desc()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.table_id = reader.read("!B")[0]
reader.skip(1)
obj.config = reader.read("!L")[0]
obj.properties = loxi.generic_util.unpack_list(reader, ofp.table_mod_prop.table_mod_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.config != other.config: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("table_desc {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("config = ");
value_name_map = {3: 'OFPTC_DEPRECATED_MASK', 4: 'OFPTC_EVICTION', 8: 'OFPTC_VACANCY_EVENTS'}
q.text(util.pretty_flags(self.config, value_name_map.values()))
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
class table_feature_prop(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = table_feature_prop.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = table_feature_prop()
obj.type = reader.read("!H")[0]
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
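# Most table_feature_prop subclasses below come in pairs: an even-typed
# property describing the regular lookup path and an odd-typed "_miss"
# variant describing the table-miss path; each registers its wire type in
# table_feature_prop.subtypes for the peek-based dispatch above.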
class table_feature_prop_apply_actions(table_feature_prop):
type = 6
def __init__(self, action_ids=None):
if action_ids != None:
self.action_ids = action_ids
else:
self.action_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.action_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_apply_actions()
_type = reader.read("!H")[0]
assert(_type == 6)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.action_ids = loxi.generic_util.unpack_list(reader, ofp.action_id.action_id.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.action_ids != other.action_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_apply_actions {")
with q.group():
with q.indent(2):
q.breakable()
q.text("action_ids = ");
q.pp(self.action_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[6] = table_feature_prop_apply_actions
class table_feature_prop_apply_actions_miss(table_feature_prop):
type = 7
def __init__(self, action_ids=None):
if action_ids != None:
self.action_ids = action_ids
else:
self.action_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.action_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_apply_actions_miss()
_type = reader.read("!H")[0]
assert(_type == 7)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.action_ids = loxi.generic_util.unpack_list(reader, ofp.action_id.action_id.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.action_ids != other.action_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_apply_actions_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("action_ids = ");
q.pp(self.action_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[7] = table_feature_prop_apply_actions_miss
class table_feature_prop_apply_copyfield(table_feature_prop):
type = 20
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_apply_copyfield()
_type = reader.read("!H")[0]
assert(_type == 20)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_apply_copyfield {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[20] = table_feature_prop_apply_copyfield
class table_feature_prop_apply_copyfield_miss(table_feature_prop):
type = 21
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_apply_copyfield_miss()
_type = reader.read("!H")[0]
assert(_type == 21)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_apply_copyfield_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[21] = table_feature_prop_apply_copyfield_miss
class table_feature_prop_apply_setfield(table_feature_prop):
type = 14
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_apply_setfield()
_type = reader.read("!H")[0]
assert(_type == 14)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_apply_setfield {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[14] = table_feature_prop_apply_setfield
class table_feature_prop_apply_setfield_miss(table_feature_prop):
type = 15
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_apply_setfield_miss()
_type = reader.read("!H")[0]
assert(_type == 15)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_apply_setfield_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[15] = table_feature_prop_apply_setfield_miss
class table_feature_prop_experimenter(table_feature_prop):
subtypes = {}
type = 65534
def __init__(self, experimenter=None, subtype=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if subtype != None:
self.subtype = subtype
else:
self.subtype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = table_feature_prop_experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = table_feature_prop_experimenter()
_type = reader.read("!H")[0]
assert(_type == 65534)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.subtype = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.subtype != other.subtype: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("subtype = ");
q.text("%#x" % self.subtype)
q.breakable()
q.text('}')
table_feature_prop.subtypes[65534] = table_feature_prop_experimenter
class table_feature_prop_experimenter_miss(table_feature_prop):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, subtype=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if subtype != None:
self.subtype = subtype
else:
self.subtype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = table_feature_prop_experimenter_miss.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = table_feature_prop_experimenter_miss()
_type = reader.read("!H")[0]
assert(_type == 65535)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.subtype = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.subtype != other.subtype: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_experimenter_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("subtype = ");
q.text("%#x" % self.subtype)
q.breakable()
q.text('}')
table_feature_prop.subtypes[65535] = table_feature_prop_experimenter_miss
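# Experimenter properties dispatch in two stages: table_feature_prop.unpack
# peeks the outer type (0xfffe or 0xffff), then the experimenter class
# peeks the 32-bit experimenter id at offset 4 to select a vendor-specific
# subclass before falling back to this generic representation.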
class table_feature_prop_instructions(table_feature_prop):
type = 0
def __init__(self, instruction_ids=None):
if instruction_ids != None:
self.instruction_ids = instruction_ids
else:
self.instruction_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.instruction_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_instructions()
_type = reader.read("!H")[0]
assert(_type == 0)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.instruction_ids = loxi.generic_util.unpack_list(reader, ofp.instruction_id.instruction_id.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.instruction_ids != other.instruction_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_instructions {")
with q.group():
with q.indent(2):
q.breakable()
q.text("instruction_ids = ");
q.pp(self.instruction_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[0] = table_feature_prop_instructions
class table_feature_prop_instructions_miss(table_feature_prop):
type = 1
def __init__(self, instruction_ids=None):
if instruction_ids != None:
self.instruction_ids = instruction_ids
else:
self.instruction_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.instruction_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_instructions_miss()
_type = reader.read("!H")[0]
assert(_type == 1)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.instruction_ids = loxi.generic_util.unpack_list(reader, ofp.instruction_id.instruction_id.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.instruction_ids != other.instruction_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_instructions_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("instruction_ids = ");
q.pp(self.instruction_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[1] = table_feature_prop_instructions_miss
class table_feature_prop_match(table_feature_prop):
type = 8
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_match()
_type = reader.read("!H")[0]
assert(_type == 8)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_match {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[8] = table_feature_prop_match
class table_feature_prop_next_tables(table_feature_prop):
type = 2
def __init__(self, next_table_ids=None):
if next_table_ids != None:
self.next_table_ids = next_table_ids
else:
self.next_table_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.next_table_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_next_tables()
_type = reader.read("!H")[0]
assert(_type == 2)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.next_table_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint8.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.next_table_ids != other.next_table_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_next_tables {")
with q.group():
with q.indent(2):
q.breakable()
q.text("next_table_ids = ");
q.pp(self.next_table_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[2] = table_feature_prop_next_tables
class table_feature_prop_next_tables_miss(table_feature_prop):
type = 3
def __init__(self, next_table_ids=None):
if next_table_ids != None:
self.next_table_ids = next_table_ids
else:
self.next_table_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.next_table_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_next_tables_miss()
_type = reader.read("!H")[0]
assert(_type == 3)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.next_table_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint8.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.next_table_ids != other.next_table_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_next_tables_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("next_table_ids = ");
q.pp(self.next_table_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[3] = table_feature_prop_next_tables_miss
class table_feature_prop_oxm_values(table_feature_prop):
type = 22
def __init__(self, oxm_values=None):
if oxm_values != None:
self.oxm_values = oxm_values
else:
self.oxm_values = b''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(self.oxm_values)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_oxm_values()
_type = reader.read("!H")[0]
assert(_type == 22)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_values = reader.read_all()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_values != other.oxm_values: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_oxm_values {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_values = ");
q.pp(self.oxm_values)
q.breakable()
        q.text('}')
table_feature_prop.subtypes[22] = table_feature_prop_oxm_values
class table_feature_prop_table_sync_from(table_feature_prop):
type = 16
def __init__(self, table_ids=None):
if table_ids != None:
self.table_ids = table_ids
else:
self.table_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.table_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_table_sync_from()
_type = reader.read("!H")[0]
assert(_type == 16)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.table_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint8.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_ids != other.table_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_table_sync_from {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_ids = ");
q.pp(self.table_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[16] = table_feature_prop_table_sync_from
class table_feature_prop_wildcards(table_feature_prop):
type = 10
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_wildcards()
_type = reader.read("!H")[0]
assert(_type == 10)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_wildcards {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[10] = table_feature_prop_wildcards
class table_feature_prop_write_actions(table_feature_prop):
type = 4
def __init__(self, action_ids=None):
if action_ids != None:
self.action_ids = action_ids
else:
self.action_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.action_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_write_actions()
_type = reader.read("!H")[0]
assert(_type == 4)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.action_ids = loxi.generic_util.unpack_list(reader, ofp.action_id.action_id.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.action_ids != other.action_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_write_actions {")
with q.group():
with q.indent(2):
q.breakable()
q.text("action_ids = ");
q.pp(self.action_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[4] = table_feature_prop_write_actions
class table_feature_prop_write_actions_miss(table_feature_prop):
type = 5
def __init__(self, action_ids=None):
if action_ids != None:
self.action_ids = action_ids
else:
self.action_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.action_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_write_actions_miss()
_type = reader.read("!H")[0]
assert(_type == 5)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.action_ids = loxi.generic_util.unpack_list(reader, ofp.action_id.action_id.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.action_ids != other.action_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_write_actions_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("action_ids = ");
q.pp(self.action_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[5] = table_feature_prop_write_actions_miss
class table_feature_prop_write_copyfield(table_feature_prop):
type = 18
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_write_copyfield()
_type = reader.read("!H")[0]
assert(_type == 18)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_write_copyfield {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[18] = table_feature_prop_write_copyfield
class table_feature_prop_write_copyfield_miss(table_feature_prop):
type = 19
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_write_copyfield_miss()
_type = reader.read("!H")[0]
assert(_type == 19)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_write_copyfield_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[19] = table_feature_prop_write_copyfield_miss
class table_feature_prop_write_setfield(table_feature_prop):
type = 12
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_write_setfield()
_type = reader.read("!H")[0]
assert(_type == 12)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_write_setfield {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[12] = table_feature_prop_write_setfield
class table_feature_prop_write_setfield_miss(table_feature_prop):
type = 13
def __init__(self, oxm_ids=None):
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_feature_prop_write_setfield_miss()
_type = reader.read("!H")[0]
assert(_type == 13)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint32.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("table_feature_prop_write_setfield_miss {")
with q.group():
with q.indent(2):
q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
table_feature_prop.subtypes[13] = table_feature_prop_write_setfield_miss
class table_features(loxi.OFObject):
def __init__(self, table_id=None, command=None, features=None, name=None, metadata_match=None, metadata_write=None, capabilities=None, max_entries=None, properties=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if command != None:
self.command = command
else:
self.command = 0
if features != None:
self.features = features
else:
self.features = 0
if name != None:
self.name = name
else:
self.name = ""
if metadata_match != None:
self.metadata_match = metadata_match
else:
self.metadata_match = 0
if metadata_write != None:
self.metadata_write = metadata_write
else:
self.metadata_write = 0
if capabilities != None:
self.capabilities = capabilities
else:
self.capabilities = 0
if max_entries != None:
self.max_entries = max_entries
else:
self.max_entries = 0
if properties != None:
self.properties = properties
else:
self.properties = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", 0)) # placeholder for length at index 0
packed.append(struct.pack("!B", self.table_id))
packed.append(struct.pack("!B", self.command))
packed.append(struct.pack("!L", self.features))
packed.append(struct.pack("!32s", self.name.encode()))
packed.append(struct.pack("!Q", self.metadata_match))
packed.append(struct.pack("!Q", self.metadata_write))
packed.append(struct.pack("!L", self.capabilities))
packed.append(struct.pack("!L", self.max_entries))
packed.append(loxi.generic_util.pack_list(self.properties))
length = sum([len(x) for x in packed])
packed[0] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_features()
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 2)
obj.table_id = reader.read("!B")[0]
obj.command = reader.read("!B")[0]
obj.features = reader.read("!L")[0]
obj.name = reader.read("!32s")[0].decode().rstrip("\x00")
obj.metadata_match = reader.read("!Q")[0]
obj.metadata_write = reader.read("!Q")[0]
obj.capabilities = reader.read("!L")[0]
obj.max_entries = reader.read("!L")[0]
obj.properties = loxi.generic_util.unpack_list(reader, ofp.common.table_feature_prop.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.command != other.command: return False
if self.features != other.features: return False
if self.name != other.name: return False
if self.metadata_match != other.metadata_match: return False
if self.metadata_write != other.metadata_write: return False
if self.capabilities != other.capabilities: return False
if self.max_entries != other.max_entries: return False
if self.properties != other.properties: return False
return True
def pretty_print(self, q):
q.text("table_features {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("command = ");
value_name_map = {0: 'OFPTFC_REPLACE', 1: 'OFPTFC_MODIFY', 2: 'OFPTFC_ENABLE', 3: 'OFPTFC_DISABLE'}
if self.command in value_name_map:
q.text("%s(%d)" % (value_name_map[self.command], self.command))
else:
q.text("%#x" % self.command)
q.text(","); q.breakable()
q.text("features = ");
value_name_map = {1: 'OFPTFF_INGRESS_TABLE', 2: 'OFPTFF_EGRESS_TABLE', 16: 'OFPTFF_FIRST_EGRESS'}
q.text(util.pretty_flags(self.features, value_name_map.values()))
q.text(","); q.breakable()
q.text("name = ");
q.pp(self.name)
q.text(","); q.breakable()
q.text("metadata_match = ");
q.text("%#x" % self.metadata_match)
q.text(","); q.breakable()
q.text("metadata_write = ");
q.text("%#x" % self.metadata_write)
q.text(","); q.breakable()
q.text("capabilities = ");
value_name_map = {3: 'OFPTC_DEPRECATED_MASK', 4: 'OFPTC_EVICTION', 8: 'OFPTC_VACANCY_EVENTS'}
q.text(util.pretty_flags(self.capabilities, value_name_map.values()))
q.text(","); q.breakable()
q.text("max_entries = ");
q.text("%#x" % self.max_entries)
q.text(","); q.breakable()
q.text("properties = ");
q.pp(self.properties)
q.breakable()
q.text('}')
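# Round-trip sketch (illustrative, not part of the generated module; assumes
# the OFReader helper from loxi.generic_util): pack() writes a length
# placeholder first and patches it once the total size is known, so unpack()
# can slice the reader to exactly one table_features record.
#
#   from loxi.generic_util import OFReader
#   tf = table_features(table_id=1, name="table0")
#   buf = tf.pack()
#   assert table_features.unpack(OFReader(buf)) == tf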
class table_stats_entry(loxi.OFObject):
def __init__(self, table_id=None, active_count=None, lookup_count=None, matched_count=None):
if table_id != None:
self.table_id = table_id
else:
self.table_id = 0
if active_count != None:
self.active_count = active_count
else:
self.active_count = 0
if lookup_count != None:
self.lookup_count = lookup_count
else:
self.lookup_count = 0
if matched_count != None:
self.matched_count = matched_count
else:
self.matched_count = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!B", self.table_id))
packed.append(b'\x00' * 3)
packed.append(struct.pack("!L", self.active_count))
packed.append(struct.pack("!Q", self.lookup_count))
packed.append(struct.pack("!Q", self.matched_count))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = table_stats_entry()
obj.table_id = reader.read("!B")[0]
reader.skip(3)
obj.active_count = reader.read("!L")[0]
obj.lookup_count = reader.read("!Q")[0]
obj.matched_count = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.table_id != other.table_id: return False
if self.active_count != other.active_count: return False
if self.lookup_count != other.lookup_count: return False
if self.matched_count != other.matched_count: return False
return True
def pretty_print(self, q):
q.text("table_stats_entry {")
with q.group():
with q.indent(2):
q.breakable()
q.text("table_id = ");
q.text("%#x" % self.table_id)
q.text(","); q.breakable()
q.text("active_count = ");
q.text("%#x" % self.active_count)
q.text(","); q.breakable()
q.text("lookup_count = ");
q.text("%#x" % self.lookup_count)
q.text(","); q.breakable()
q.text("matched_count = ");
q.text("%#x" % self.matched_count)
q.breakable()
q.text('}')
class uint32(loxi.OFObject):
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!L", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = uint32()
obj.value = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("uint32 {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
class uint64(loxi.OFObject):
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!Q", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = uint64()
obj.value = reader.read("!Q")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("uint64 {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
class uint8(loxi.OFObject):
def __init__(self, value=None):
if value != None:
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!B", self.value))
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = uint8()
obj.value = reader.read("!B")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.value != other.value: return False
return True
def pretty_print(self, q):
q.text("uint8 {")
with q.group():
with q.indent(2):
q.breakable()
q.text("value = ");
q.text("%#x" % self.value)
q.breakable()
q.text('}')
class _action_copy_field(loxi.OFObject):
type = 28
def __init__(self, n_bits=None, src_offset=None, dst_offset=None, oxm_ids=None):
if n_bits != None:
self.n_bits = n_bits
else:
self.n_bits = 0
if src_offset != None:
self.src_offset = src_offset
else:
self.src_offset = 0
if dst_offset != None:
self.dst_offset = dst_offset
else:
self.dst_offset = 0
if oxm_ids != None:
self.oxm_ids = oxm_ids
else:
self.oxm_ids = []
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.n_bits))
packed.append(struct.pack("!H", self.src_offset))
packed.append(struct.pack("!H", self.dst_offset))
packed.append(b'\x00' * 2)
packed.append(loxi.generic_util.pack_list(self.oxm_ids))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = _action_copy_field()
_type = reader.read("!H")[0]
assert(_type == 28)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.n_bits = reader.read("!H")[0]
obj.src_offset = reader.read("!H")[0]
obj.dst_offset = reader.read("!H")[0]
reader.skip(2)
obj.oxm_ids = loxi.generic_util.unpack_list(reader, ofp.oxm.oxm.unpack)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.n_bits != other.n_bits: return False
if self.src_offset != other.src_offset: return False
if self.dst_offset != other.dst_offset: return False
if self.oxm_ids != other.oxm_ids: return False
return True
def pretty_print(self, q):
q.text("_action_copy_field {")
with q.group():
with q.indent(2):
q.breakable()
q.text("n_bits = ");
q.text("%#x" % self.n_bits)
q.text(","); q.breakable()
q.text("src_offset = ");
q.text("%#x" % self.src_offset)
q.text(","); q.breakable()
q.text("dst_offset = ");
q.text("%#x" % self.dst_offset)
q.text(","); q.breakable()
q.text("oxm_ids = ");
q.pp(self.oxm_ids)
q.breakable()
q.text('}')
class _controller_status_prop_experimenter(controller_status_prop):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, exp_type=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if exp_type != None:
self.exp_type = exp_type
else:
self.exp_type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.exp_type))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = _controller_status_prop_experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = _controller_status_prop_experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.experimenter = reader.read("!L")[0]
obj.exp_type = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.exp_type != other.exp_type: return False
return True
def pretty_print(self, q):
q.text("_controller_status_prop_experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("exp_type = ");
q.text("%#x" % self.exp_type)
q.breakable()
q.text('}')
controller_status_prop.subtypes[65535] = _controller_status_prop_experimenter
class _controller_status_prop_uri(controller_status_prop):
type = 0
def __init__(self, uri=None):
if uri != None:
self.uri = uri
else:
self.uri = loxi.unimplemented('init of_controller_uri_t')
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(loxi.unimplemented('pack of_controller_uri_t'))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = _controller_status_prop_uri()
_type = reader.read("!H")[0]
assert(_type == 0)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.uri = loxi.unimplemented('unpack of_controller_uri_t')
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.uri != other.uri: return False
return True
def pretty_print(self, q):
q.text("_controller_status_prop_uri {")
with q.group():
with q.indent(2):
q.breakable()
q.text("uri = ");
q.pp(self.uri)
q.breakable()
q.text('}')
controller_status_prop.subtypes[0] = _controller_status_prop_uri
class _port_desc_prop_recirculate(loxi.OFObject):
type = 4
def __init__(self, port_nos=None):
if port_nos != None:
self.port_nos = port_nos
else:
self.port_nos = b''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for length at index 1
packed.append(self.port_nos)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return functools.reduce(lambda x,y: x+y, packed)
@staticmethod
def unpack(reader):
obj = _port_desc_prop_recirculate()
_type = reader.read("!H")[0]
assert(_type == 4)
_length = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.port_nos = reader.read_all()
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port_nos != other.port_nos: return False
return True
def pretty_print(self, q):
q.text("_port_desc_prop_recirculate {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port_nos = ");
q.pp(self.port_nos)
q.breakable()
q.text('}')
match = match_v3
stat = stat_v6
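# Usage sketch (illustrative): the uint8/uint32/uint64 wrappers above let
# scalar lists reuse the same generic pack/unpack helpers as full objects,
# e.g. for the oxm_ids list of a table feature property (the OXM id below is
# an arbitrary example value):
#
#   prop = table_feature_prop_write_setfield_miss(oxm_ids=[uint32(0x80000807)])
#   buf = prop.pack()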
| 33.631596
| 288
| 0.55368
| 25,511
| 202,664
| 4.207205
| 0.015562
| 0.035079
| 0.035358
| 0.046958
| 0.864139
| 0.833225
| 0.797475
| 0.768303
| 0.741955
| 0.72783
| 0
| 0.012995
| 0.314989
| 202,664
| 6,025
| 289
| 33.637178
| 0.760126
| 0.01462
| 0
| 0.761833
| 0
| 0
| 0.046244
| 0.010285
| 0
| 0
| 0
| 0
| 0.008709
| 1
| 0.094661
| false
| 0
| 0.00284
| 0
| 0.206929
| 0.018932
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f6e5a2764ddc88181c95264a074baf6afa327e9
| 1,035
|
py
|
Python
|
test/test-GetBetweennessCentr.py
|
Cam2337/snap-python
|
0bf722b461f8b5aae3ecb2757313521e9e9e76f1
|
[
"BSD-3-Clause"
] | 242
|
2015-01-01T08:40:28.000Z
|
2022-03-18T05:22:09.000Z
|
test/test-GetBetweennessCentr.py
|
Cam2337/snap-python
|
0bf722b461f8b5aae3ecb2757313521e9e9e76f1
|
[
"BSD-3-Clause"
] | 99
|
2015-01-24T07:55:27.000Z
|
2021-10-30T18:20:13.000Z
|
test/test-GetBetweennessCentr.py
|
Cam2337/snap-python
|
0bf722b461f8b5aae3ecb2757313521e9e9e76f1
|
[
"BSD-3-Clause"
] | 105
|
2015-03-03T06:45:17.000Z
|
2022-02-24T15:52:40.000Z
|
import snap
Graph = snap.GenRndGnm(snap.PNGraph, 100, 1000)
Nodes = snap.TIntFltH()
Edges = snap.TIntPrFltH()
snap.GetBetweennessCentr(Graph, Nodes, Edges, 1.0)
for node in Nodes:
print("node: %d centrality: %f" % (node, Nodes[node]))
for edge in Edges:
print("edge: (%d, %d) centrality: %f" % (edge.GetVal1(), edge.GetVal2(), Edges[edge]))
UGraph = snap.GenRndGnm(snap.PUNGraph, 100, 1000)
Nodes = snap.TIntFltH()
Edges = snap.TIntPrFltH()
snap.GetBetweennessCentr(UGraph, Nodes, Edges, 1.0)
for node in Nodes:
print("node: %d centrality: %f" % (node, Nodes[node]))
for edge in Edges:
print("edge: (%d, %d) centrality: %f" % (edge.GetVal1(), edge.GetVal2(), Edges[edge]))
Network = snap.GenRndGnm(snap.PNEANet, 100, 1000)
Nodes = snap.TIntFltH()
Edges = snap.TIntPrFltH()
snap.GetBetweennessCentr(Network, Nodes, Edges, 1.0)
for node in Nodes:
print("node: %d centrality: %f" % (node, Nodes[node]))
for edge in Edges:
print("edge: (%d, %d) centrality: %f" % (edge.GetVal1(), edge.GetVal2(), Edges[edge]))
| 33.387097
| 90
| 0.674396
| 146
| 1,035
| 4.780822
| 0.191781
| 0.094556
| 0.103152
| 0.068768
| 0.829513
| 0.829513
| 0.829513
| 0.829513
| 0.829513
| 0.829513
| 0
| 0.037204
| 0.142995
| 1,035
| 30
| 91
| 34.5
| 0.749718
| 0
| 0
| 0.72
| 0
| 0
| 0.151016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.04
| 0
| 0.04
| 0.24
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f8715aae2da811915416f6b66c80204c552b096
| 205
|
py
|
Python
|
platforms/winpack_dldt/2021.4/patch.config.py
|
xipingyan/opencv
|
39c3334147ec02761b117f180c9c4518be18d1fa
|
[
"Apache-2.0"
] | 56,632
|
2016-07-04T16:36:08.000Z
|
2022-03-31T18:38:14.000Z
|
platforms/winpack_dldt/2021.4/patch.config.py
|
yusufm423/opencv
|
6a2077cbd8a8a0d8cbd3e0e8c3ca239f17e6c067
|
[
"Apache-2.0"
] | 13,593
|
2016-07-04T13:59:03.000Z
|
2022-03-31T21:04:51.000Z
|
platforms/winpack_dldt/2021.4/patch.config.py
|
yusufm423/opencv
|
6a2077cbd8a8a0d8cbd3e0e8c3ca239f17e6c067
|
[
"Apache-2.0"
] | 54,986
|
2016-07-04T14:24:38.000Z
|
2022-03-31T22:51:18.000Z
|
applyPatch('20210630-dldt-disable-unused-targets.patch')
applyPatch('20210630-dldt-pdb.patch')
applyPatch('20210630-dldt-disable-multidevice-autoplugin.patch')
applyPatch('20210630-dldt-vs-version.patch')
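# For context (an assumption, not part of this config): applyPatch is expected
# to be provided by the surrounding winpack_dldt build tooling when this file
# is executed. A hypothetical stand-in that applies a patch file shipped next
# to this script might look like:
#
#   import os, subprocess
#   def applyPatch(name, base_dir='.'):
#       patch = os.path.join(os.path.dirname(__file__), name)
#       subprocess.check_call(['git', 'apply', '-p1', patch], cwd=base_dir)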
| 41
| 64
| 0.819512
| 25
| 205
| 6.72
| 0.48
| 0.428571
| 0.52381
| 0.482143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159204
| 0.019512
| 205
| 4
| 65
| 51.25
| 0.676617
| 0
| 0
| 0
| 0
| 0
| 0.707317
| 0.707317
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f6b60855ecd1cec26ab16490b6746ce11574640f
| 13,372
|
py
|
Python
|
tests/problems.py
|
emaballarin/torchsde
|
83373b30c9bd447ec32a8c286c42a4cf5e9753a6
|
[
"Apache-2.0"
] | 984
|
2020-07-06T23:15:17.000Z
|
2022-03-31T10:09:49.000Z
|
tests/problems.py
|
GabrielNobis/torchsde
|
53038a3efcd77f6c9f3cfd0310700a59be5d5d2d
|
[
"Apache-2.0"
] | 95
|
2020-07-11T10:53:02.000Z
|
2022-03-30T21:33:56.000Z
|
tests/problems.py
|
GabrielNobis/torchsde
|
53038a3efcd77f6c9f3cfd0310700a59be5d5d2d
|
[
"Apache-2.0"
] | 117
|
2020-07-07T20:05:05.000Z
|
2022-03-20T21:30:23.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Problems of different noise types.
Each example is of a particular noise type.
ExDiagonal, ExScalar, ExAdditive are examples 1-3 from
Rackauckas, Christopher, and Qing Nie. "Adaptive methods for stochastic
differential equations via natural embeddings and rejection sampling with memory."
Discrete and continuous dynamical systems. Series B 22.7 (2017): 2731.
Neural* all use simple neural networks.
BasicSDE1-4 are problems where the drift and diffusion may not depend on
trainable parameters.
CustomNamesSDE and CustomNamesSDELogqp are used to test the argument `names`.
"""
import torch
from torch import nn
from torchsde import BaseSDE, SDEIto
from torchsde.settings import NOISE_TYPES, SDE_TYPES
class ExDiagonal(BaseSDE):
noise_type = NOISE_TYPES.diagonal
def __init__(self, d, sde_type=SDE_TYPES.ito, **kwargs):
super(ExDiagonal, self).__init__(sde_type=sde_type, noise_type=ExDiagonal.noise_type)
self._nfe = 0
# Use non-exploding initialization.
sigma = torch.sigmoid(torch.randn(d))
mu = -sigma ** 2 - torch.sigmoid(torch.randn(d))
self.mu = nn.Parameter(mu, requires_grad=True)
self.sigma = nn.Parameter(sigma, requires_grad=True)
self.f = self.f_ito if sde_type == SDE_TYPES.ito else self.f_stratonovich
def f_ito(self, t, y):
self._nfe += 1
return self.mu * y
def f_stratonovich(self, t, y):
self._nfe += 1
return self.mu * y - .5 * (self.sigma ** 2) * y
def g(self, t, y):
self._nfe += 1
return self.sigma * y
def h(self, t, y):
self._nfe += 1
return torch.zeros_like(y)
@property
def nfe(self):
return self._nfe
class ExScalar(BaseSDE):
noise_type = NOISE_TYPES.scalar
def __init__(self, d, sde_type=SDE_TYPES.ito, **kwargs):
super(ExScalar, self).__init__(sde_type=sde_type, noise_type=ExScalar.noise_type)
self._nfe = 0
self.p = nn.Parameter(torch.sigmoid(torch.randn(d)), requires_grad=True)
self.f = self.f_ito if sde_type == SDE_TYPES.ito else self.f_stratonovich
def f_ito(self, t, y):
self._nfe += 1
return -self.p ** 2. * torch.sin(y) * torch.cos(y) ** 3.
def f_stratonovich(self, t, y):
self._nfe += 1
return torch.zeros_like(y)
def g(self, t, y):
self._nfe += 1
return (self.p * torch.cos(y) ** 2).unsqueeze(dim=-1)
def h(self, t, y):
self._nfe += 1
return torch.zeros_like(y)
@property
def nfe(self):
return self._nfe
class ExAdditive(BaseSDE):
noise_type = NOISE_TYPES.additive
def __init__(self, d, m, sde_type=SDE_TYPES.ito, **kwargs):
super(ExAdditive, self).__init__(sde_type=sde_type, noise_type=ExAdditive.noise_type)
self._nfe = 0
self.m = m
self.a = nn.Parameter(torch.sigmoid(torch.randn(d)), requires_grad=True)
self.b = nn.Parameter(torch.sigmoid(torch.randn(d)), requires_grad=True)
def f(self, t, y):
self._nfe += 1
return self.b / torch.sqrt(1. + t) - y / (2. + 2. * t)
def g(self, t, y):
self._nfe += 1
fill_value = self.a * self.b / torch.sqrt(1. + t)
return fill_value.unsqueeze(dim=0).unsqueeze(dim=-1).repeat(y.size(0), 1, self.m)
def h(self, t, y):
self._nfe += 1
return torch.zeros_like(y)
@property
def nfe(self):
return self._nfe
class NeuralDiagonal(BaseSDE):
noise_type = NOISE_TYPES.diagonal
def __init__(self, d, sde_type=SDE_TYPES.ito, **kwargs):
super(NeuralDiagonal, self).__init__(sde_type=sde_type, noise_type=NeuralDiagonal.noise_type)
self.f_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d)
)
self.g_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d),
nn.Sigmoid()
)
def f(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return self.f_net(ty)
def g(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return 0.1 * self.g_net(ty) # small noise makes passing adjoint tests easier/possible
def h(self, t, y):
return torch.zeros_like(y)
class NeuralScalar(BaseSDE):
noise_type = NOISE_TYPES.scalar
def __init__(self, d, sde_type=SDE_TYPES.ito, **kwargs):
super(NeuralScalar, self).__init__(sde_type=sde_type, noise_type=NeuralScalar.noise_type)
self.f_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d)
)
self.g_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d),
nn.Sigmoid()
)
def f(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return self.f_net(ty)
def g(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return 0.1 * self.g_net(ty).unsqueeze(-1) # small noise makes passing adjoint tests easier/possible
def h(self, t, y):
return torch.zeros_like(y)
class NeuralAdditive(BaseSDE):
noise_type = NOISE_TYPES.additive
def __init__(self, d, m, sde_type=SDE_TYPES.ito, **kwargs):
super(NeuralAdditive, self).__init__(sde_type=sde_type, noise_type=NeuralAdditive.noise_type)
self.d = d
self.m = m
self.f_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d)
)
self.g_net = nn.Sequential(
nn.Linear(1, 8),
nn.Softplus(),
nn.Linear(8, d * m),
nn.Sigmoid()
)
def f(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return self.f_net(ty)
def g(self, t, y):
return self.g_net(t.expand(y.size(0), 1)).view(y.size(0), self.d, self.m)
def h(self, t, y):
return torch.zeros_like(y)
class NeuralGeneral(BaseSDE):
noise_type = NOISE_TYPES.general
def __init__(self, d, m, sde_type=SDE_TYPES.ito, **kwargs):
super(NeuralGeneral, self).__init__(sde_type=sde_type, noise_type=NeuralGeneral.noise_type)
self.d = d
self.m = m
self.f_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d)
)
self.g_net = nn.Sequential(
nn.Linear(d + 1, 8),
nn.Softplus(),
nn.Linear(8, d * m),
nn.Sigmoid()
)
def f(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return self.f_net(ty)
def g(self, t, y):
ty = torch.cat([t.expand(y.size(0), 1), y], dim=1)
return self.g_net(ty).reshape(y.size(0), self.d, self.m)
def h(self, t, y):
return torch.zeros_like(y)
class BasicSDE1(SDEIto):
def __init__(self, d=10):
super(BasicSDE1, self).__init__(noise_type="diagonal")
self.shared_param = nn.Parameter(torch.randn(1, d), requires_grad=True)
self.no_grad_param = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param1 = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param2 = nn.Parameter(torch.randn(1, d), requires_grad=True)
def f(self, t, y):
return self.shared_param * torch.sin(y) * 0.2 + torch.cos(y ** 2.) * 0.1 + torch.cos(t) + self.no_grad_param * y
def g(self, t, y):
return torch.sigmoid(self.shared_param * torch.cos(y) * .3 + torch.sin(t)) + torch.sigmoid(
self.no_grad_param * y) + 0.1
def h(self, t, y):
return torch.sigmoid(y)
class BasicSDE2(SDEIto):
def __init__(self, d=10):
super(BasicSDE2, self).__init__(noise_type="diagonal")
self.shared_param = nn.Parameter(torch.randn(1, d), requires_grad=True)
self.no_grad_param = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param1 = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param2 = nn.Parameter(torch.randn(1, d), requires_grad=True)
def f(self, t, y):
return self.shared_param * 0.2 + self.no_grad_param + torch.zeros_like(y)
def g(self, t, y):
return torch.sigmoid(self.shared_param * .3) + torch.sigmoid(self.no_grad_param) + torch.zeros_like(y) + 0.1
def h(self, t, y):
return torch.sigmoid(y)
class BasicSDE3(SDEIto):
def __init__(self, d=10):
super(BasicSDE3, self).__init__(noise_type="diagonal")
self.shared_param = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.no_grad_param = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param1 = nn.Parameter(torch.randn(1, d), requires_grad=True)
self.unused_param2 = nn.Parameter(torch.randn(1, d), requires_grad=False)
def f(self, t, y):
return self.shared_param * 0.2 + self.no_grad_param + torch.zeros_like(y)
def g(self, t, y):
return torch.sigmoid(self.shared_param * .3) + torch.sigmoid(self.no_grad_param) + torch.zeros_like(y) + 0.1
def h(self, t, y):
return torch.sigmoid(y)
class BasicSDE4(SDEIto):
def __init__(self, d=10):
super(BasicSDE4, self).__init__(noise_type="diagonal")
self.shared_param = nn.Parameter(torch.randn(1, d), requires_grad=True)
self.no_grad_param = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param1 = nn.Parameter(torch.randn(1, d), requires_grad=False)
self.unused_param2 = nn.Parameter(torch.randn(1, d), requires_grad=True)
def f(self, t, y):
return torch.zeros_like(y).fill_(0.1)
def g(self, t, y):
return torch.sigmoid(torch.zeros_like(y)) + 0.1
def h(self, t, y):
return torch.sigmoid(y)
class CustomNamesSDE(SDEIto):
def __init__(self):
super(CustomNamesSDE, self).__init__(noise_type="diagonal")
def forward(self, t, y):
return y * t
def g(self, t, y):
return torch.sigmoid(t * y)
class CustomNamesSDELogqp(SDEIto):
def __init__(self):
super(CustomNamesSDELogqp, self).__init__(noise_type="diagonal")
def forward(self, t, y):
return y * t
def g(self, t, y):
return torch.sigmoid(t * y)
def w(self, t, y):
return y * t
class FGSDE(torch.nn.Module):
noise_type = 'general'
def __init__(self, sde_type, vector):
super(FGSDE, self).__init__()
self.sde_type = sde_type
self.register_buffer('vector', vector)
def f(self, t, y):
return -y
def g(self, t, y):
return y.unsqueeze(-1).sigmoid() * self.vector
class FAndGSDE(torch.nn.Module):
noise_type = 'general'
def __init__(self, sde_type, vector):
super(FAndGSDE, self).__init__()
self.sde_type = sde_type
self.register_buffer('vector', vector)
def f_and_g(self, t, y):
return -y, y.unsqueeze(-1).sigmoid() * self.vector
class GProdSDE(torch.nn.Module):
noise_type = 'general'
def __init__(self, sde_type, vector):
super(GProdSDE, self).__init__()
self.sde_type = sde_type
self.register_buffer('vector', vector)
def f(self, t, y):
return -y
def g_prod(self, t, y, v):
return (y.unsqueeze(-1).sigmoid() * self.vector).bmm(v.unsqueeze(-1)).squeeze(-1)
class FAndGProdSDE(torch.nn.Module):
noise_type = 'general'
def __init__(self, sde_type, vector):
super(FAndGProdSDE, self).__init__()
self.sde_type = sde_type
self.register_buffer('vector', vector)
def f_and_g_prod(self, t, y, v):
return -y, (y.unsqueeze(-1).sigmoid() * self.vector).bmm(v.unsqueeze(-1)).squeeze(-1)
class FAndGGProdSDE1(torch.nn.Module):
noise_type = 'general'
def __init__(self, sde_type, vector):
super(FAndGGProdSDE1, self).__init__()
self.sde_type = sde_type
self.register_buffer('vector', vector)
def f_and_g(self, t, y):
return -y, y.unsqueeze(-1).sigmoid() * self.vector
def g_prod(self, t, y, v):
return (y.unsqueeze(-1).sigmoid() * self.vector).bmm(v.unsqueeze(-1)).squeeze(-1)
class FAndGGProdSDE2(torch.nn.Module):
noise_type = 'general'
def __init__(self, sde_type, vector):
super(FAndGGProdSDE2, self).__init__()
self.sde_type = sde_type
self.register_buffer('vector', vector)
def f(self, t, y):
return -y
def f_and_g(self, t, y):
return -y, y.unsqueeze(-1).sigmoid() * self.vector
def g_prod(self, t, y, v):
return (y.unsqueeze(-1).sigmoid() * self.vector).bmm(v.unsqueeze(-1)).squeeze(-1)
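# A minimal usage sketch (an addition, not part of the original module):
# integrating one of the problems above with torchsde.sdeint. The batch size,
# state dimension and time grid below are illustrative assumptions.
if __name__ == '__main__':
    import torchsde
    sde = ExDiagonal(d=3)
    y0 = torch.full((4, 3), 0.1)           # (batch_size, d)
    ts = torch.linspace(0., 1., steps=20)
    ys = torchsde.sdeint(sde, y0, ts)      # (len(ts), batch_size, d)
    print(ys.shape, "drift/diffusion evaluations:", sde.nfe)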
| 30.321995
| 120
| 0.617185
| 1,986
| 13,372
| 3.964753
| 0.113797
| 0.013716
| 0.038862
| 0.044196
| 0.762637
| 0.743332
| 0.726441
| 0.706756
| 0.666878
| 0.664719
| 0
| 0.018662
| 0.246635
| 13,372
| 440
| 121
| 30.390909
| 0.762954
| 0.095199
| 0
| 0.717687
| 0
| 0
| 0.010433
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.248299
| false
| 0
| 0.013605
| 0.122449
| 0.554422
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
63e3051d819d9a325a4f85fc823ba7f2bfcc9649
| 3,085
|
py
|
Python
|
netbox/extras/migrations/0071_standardize_id_fields.py
|
cybarox/netbox
|
ea197eff5f4fe925bb354d1375912decd81752bd
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/migrations/0071_standardize_id_fields.py
|
cybarox/netbox
|
ea197eff5f4fe925bb354d1375912decd81752bd
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/migrations/0071_standardize_id_fields.py
|
cybarox/netbox
|
ea197eff5f4fe925bb354d1375912decd81752bd
|
[
"Apache-2.0"
] | null | null | null |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('extras', '0070_customlink_enabled'),
]
operations = [
# Model IDs
migrations.AlterField(
model_name='configcontext',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='configrevision',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='customfield',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='customlink',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='exporttemplate',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='imageattachment',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='jobresult',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='journalentry',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='objectchange',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='taggeditem',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='webhook',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
),
# GFK IDs
migrations.AlterField(
model_name='imageattachment',
name='object_id',
field=models.PositiveBigIntegerField(),
),
migrations.AlterField(
model_name='journalentry',
name='assigned_object_id',
field=models.PositiveBigIntegerField(),
),
migrations.AlterField(
model_name='objectchange',
name='changed_object_id',
field=models.PositiveBigIntegerField(),
),
migrations.AlterField(
model_name='objectchange',
name='related_object_id',
field=models.PositiveBigIntegerField(blank=True, null=True),
),
]
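# For context (a hypothetical sketch, not part of the migration): after this
# migration applies, the affected models behave as if declared with explicit
# 64-bit keys, e.g.:
#
#   class ImageAttachment(models.Model):
#       id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False)
#       object_id = models.PositiveBigIntegerField()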
| 34.277778
| 92
| 0.584765
| 269
| 3,085
| 6.535316
| 0.171004
| 0.170648
| 0.213311
| 0.24744
| 0.857224
| 0.813424
| 0.754835
| 0.754835
| 0.754835
| 0.714448
| 0
| 0.00187
| 0.306645
| 3,085
| 89
| 93
| 34.662921
| 0.820009
| 0.005511
| 0
| 0.756098
| 0
| 0
| 0.094617
| 0.007504
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012195
| 0
| 0.04878
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
63f05bb1002eae738179e8fc20b77f8c5fecfed1
| 34,183
|
py
|
Python
|
pandapower/test/opf/test_basic.py
|
mathildebadoual/pandapower
|
9ba4bcb78e84b644d2ba6df0c08e285c54af8ddc
|
[
"BSD-3-Clause"
] | 1
|
2020-10-19T06:39:15.000Z
|
2020-10-19T06:39:15.000Z
|
pandapower/test/opf/test_basic.py
|
miek770/pandapower
|
de004efc1b7432a633792af4f551f7635a02db47
|
[
"BSD-3-Clause"
] | null | null | null |
pandapower/test/opf/test_basic.py
|
miek770/pandapower
|
de004efc1b7432a633792af4f551f7635a02db47
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2018 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import pytest
import numpy as np
import pandapower as pp
from pandapower.test.toolbox import add_grid_connection
from pandapower.toolbox import convert_format
try:
import pplog as logging
except ImportError:
import logging
logger = logging.getLogger(__name__)
@pytest.fixture
def simple_opf_test_net():
net = pp.create_empty_network()
pp.create_bus(net, vn_kv=10.)
pp.create_bus(net, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100)
return net
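# Fixture topology (added commentary): ext_grid at bus 0 (10 kV), a 50 km
# line "line2" from bus 0 to bus 1 (0.4 kV), and a controllable gen plus a
# fixed 20 kW load at bus 1.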
def test_convert_format():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
net.gen["cost_per_kw"] = 100
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
# run OPF
convert_format(net)
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_simplest_voltage():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100)
pp.create_polynomial_cost(net, 0, "gen", np.array([100, 0]))
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
pp.runopp(net, verbose=False, check_connectivity=True)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_eg_voltage():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0, vm_pu=1.01)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100)
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert net.res_bus.vm_pu.at[0] == net.ext_grid.vm_pu.values
def test_simplest_dispatch():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_polynomial_cost(net, 0, "gen", np.array([100, 0]))
pp.create_ext_grid(net, 0)
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([101, 0]))
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, cost_function="linear", verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_est_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_opf_gen_voltage():
""" Testing a simple network with transformer for voltage
constraints with OPF using a generator """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75,
tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0,
vn_hv_kv=10.0, vscr_percent=2.8125,
tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751,
sn_kva=16.0, pfe_kw=0.11, name=None,
in_service=True, index=None, max_loading_percent=200)
pp.create_gen(net, 3, p_kw=-10, controllable=True, max_p_kw=0, min_p_kw=-25, max_q_kvar=500,
min_q_kvar=-500)
pp.create_polynomial_cost(net, 0, "gen", np.array([10, 0]))
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100000)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100000)
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_opf_gen_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_opf_sgen_voltage():
""" Testing a simple network with transformer for voltage
constraints with OPF using a static generator """
# boundaries
vm_max = 1.04
vm_min = 0.96
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75,
tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0,
vn_hv_kv=10.0, vscr_percent=2.8125,
tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751,
sn_kva=16.0, pfe_kw=0.11, name=None,
in_service=True, index=None, max_loading_percent=1000000)
pp.create_sgen(net, 3, p_kw=-10, controllable=True, max_p_kw=-5, min_p_kw=-15, max_q_kvar=25,
min_q_kvar=-25)
pp.create_polynomial_cost(net, 0, "sgen", np.array([100, 0]))
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=1000000)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=1000000)
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# assert and check result
logger.debug("test_opf_sgen_voltage")
logger.debug("res_sgen:\n%s" % net.res_sgen)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_opf_gen_loading():
""" Testing a simple network with transformer for loading
constraints with OPF using a generator """
# wide open voltage boundaries to make sure they don't interfere with loading constraints
vm_max = 1.5
vm_min = 0.5
max_line_loading = 11
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75,
tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0,
vn_hv_kv=10.0, vscr_percent=2.8125,
tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751,
sn_kva=16.0, pfe_kw=0.11, name=None,
in_service=True, index=None, max_loading_percent=145)
pp.create_gen(net, 3, p_kw=-10, controllable=True, max_p_kw=-5, min_p_kw=-15, max_q_kvar=50,
min_q_kvar=-50)
pp.create_polynomial_cost(net, 0, "gen", np.array([-10, 0]))
pp.create_ext_grid(net, 0)
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([.1, 0]))
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
# run OPF
pp.runopp(net, verbose=False, OPF_VIOLATION=1e-1, OUT_LIM_LINE=2,
PDIPM_GRADTOL=1e-10, PDIPM_COMPTOL=1e-10, PDIPM_COSTTOL=1e-10)
assert net["OPF_converged"]
# assert and check result
logger.debug("test_opf_gen_loading")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_line.loading_percent:\n%s" % net.res_line.loading_percent)
assert max(net.res_line.loading_percent) < max_line_loading
logger.debug("res_trafo.loading_percent:\n%s" % net.res_trafo.loading_percent)
assert max(net.res_trafo.loading_percent) < 145
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_opf_sgen_loading():
""" Testing a simple network with transformer for loading
constraints with OPF using a static generator """
# boundaries
vm_max = 1.5
vm_min = 0.5
max_trafo_loading = 800
max_line_loading = 13
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75, tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0, vn_hv_kv=10.0,
vscr_percent=2.8125, tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751, sn_kva=16.0,
pfe_kw=0.11, name=None, in_service=True, index=None,
max_loading_percent=max_trafo_loading)
pp.create_sgen(net, 3, p_kw=-10, controllable=True, max_p_kw=-5, min_p_kw=-15, max_q_kvar=25,
min_q_kvar=-25)
pp.create_polynomial_cost(net, 0, "sgen", np.array([-10, 0]))
pp.create_ext_grid(net, 0)
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([.1, 0]))
pp.create_line_from_parameters(net, 1, 2, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
pp.create_line_from_parameters(net, 2, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=max_line_loading)
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# assert and check result
logger.debug("test_opf_sgen_loading")
logger.debug("res_sgen:\n%s" % net.res_sgen)
logger.debug("res_line.loading_percent:\n%s" % net.res_line.loading_percent)
assert max(net.res_line.loading_percent) - max_line_loading < 1e-2
logger.debug("res_trafo.loading_percent:\n%s" % net.res_trafo.loading_percent)
assert max(net.res_trafo.loading_percent) < max_trafo_loading
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
# check connectivity check
pp.runopp(net, verbose=False, check_connectivity=True)
def test_unconstrained_line():
""" Testing a very simple network without transformer for voltage
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876)
pp.create_polynomial_cost(net, 0, "gen", np.array([1, 0]))
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert max(net.res_bus.vm_pu) < vm_max
assert min(net.res_bus.vm_pu) > vm_min
def test_trafo3w_loading():
net = pp.create_empty_network()
b1, b2, l1 = add_grid_connection(net, vn_kv=110.)
b3 = pp.create_bus(net, vn_kv=20.)
b4 = pp.create_bus(net, vn_kv=10.)
tidx = pp.create_transformer3w(
net, b2, b3, b4, std_type='63/25/38 MVA 110/20/10 kV', max_loading_percent=120)
pp.create_load(net, b3, 5e3, controllable=False)
load_id = pp.create_load(net, b4, 5e3, controllable=True, max_p_kw=5e4, min_p_kw=0, min_q_kvar=-1e9, max_q_kvar=1e9)
pp.create_polynomial_cost(net, load_id, "load", np.array([-1, 0]))
#pp.create_xward(net, b4, 1000, 1000, 1000, 1000, 0.1, 0.1, 1.0)
net.trafo3w.shift_lv_degree.at[tidx] = 120
net.trafo3w.shift_mv_degree.at[tidx] = 80
# pp.runopp(net, calculate_voltage_angles = True) >> Doesn't converge
for init in ["pf", "flat"]:
pp.runopp(net, calculate_voltage_angles=False, verbose=False, init=init)
assert net["OPF_converged"]
assert abs(net.res_trafo3w.loading_percent.values - 120) < 1e-3
def test_dcopf_poly(simple_opf_test_net):
net = simple_opf_test_net
pp.create_polynomial_cost(net, 0, "gen", np.array([100, 0]))
# run OPF
pp.rundcopp(net, verbose=False)
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert abs(100 * net.res_gen.p_kw.values - net.res_cost) < 1e-3
def test_opf_poly(simple_opf_test_net):
net = simple_opf_test_net
pp.create_polynomial_cost(net, 0, "gen", np.array([100, 0]))
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert abs(100 * net.res_gen.p_kw.values - net.res_cost) < 1e-3
def test_opf_pwl(simple_opf_test_net):
# create net
net = simple_opf_test_net
# pp.create_polynomial_cost(net, 0, "gen", np.array([-100, 0]))
pp.create_piecewise_linear_cost(net, 0, "gen", np.array([[-200, -20000], [-100, -10000], [0, 0]]))
# run OPF
for init in ["pf", "flat"]:
pp.runopp(net, verbose=False, init=init)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert abs(100 * net.res_gen.p_kw.values - net.res_cost) < 1e-3
def test_dcopf_pwl(simple_opf_test_net):
# create net
net = simple_opf_test_net
# pp.create_polynomial_cost(net, 0, "gen", np.array([-100, 0]))
pp.create_piecewise_linear_cost(net, 0, "gen", np.array([[-200, -20000], [-100, -10000], [0, 0]]))
# run OPF
pp.rundcopp(net, verbose=False)
assert net["OPF_converged"]
# check and assert result
logger.debug("test_simplest_voltage")
logger.debug("res_gen:\n%s" % net.res_gen)
logger.debug("res_ext_grid:\n%s" % net.res_ext_grid)
logger.debug("res_bus.vm_pu: \n%s" % net.res_bus.vm_pu)
assert abs(100 * net.res_gen.p_kw.values - net.res_cost) < 1e-3
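# Note on the piecewise-linear cost data above (added commentary): each row is
# a (p_kw, cost) breakpoint, so [[-200, -20000], [-100, -10000], [0, 0]] encodes
# a constant marginal cost of 100 per kW of generation, matching the polynomial
# variant np.array([100, 0]) used in the poly tests and the 100 * p_kw check in
# the assertions.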
def test_opf_varying_max_line_loading():
""" Testing a simple network with transformer for loading
constraints with OPF using a generator """
# boundaries
vm_max = 1.5
vm_min = 0.5
max_trafo_loading = 800
max_line_loading = 13
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_transformer_from_parameters(net, 0, 1, vsc_percent=3.75, tp_max=2, vn_lv_kv=0.4,
shift_degree=150, tp_mid=0, vn_hv_kv=10.0,
vscr_percent=2.8125, tp_pos=0, tp_side="hv", tp_min=-2,
tp_st_percent=2.5, i0_percent=0.68751, sn_kva=16.0,
pfe_kw=0.11, name=None, in_service=True, index=None,
max_loading_percent=max_trafo_loading)
pp.create_sgen(net, 3, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=25,
min_q_kvar=-25)
pp.create_sgen(net, 2, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=25,
min_q_kvar=-25)
pp.create_polynomial_cost(net, 0, "sgen", np.array([10, 0]))
pp.create_polynomial_cost(net, 1, "sgen", np.array([10, 0]))
pp.create_ext_grid(net, 0)
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([-.1, 0]))
pp.create_line_from_parameters(net, 1, 2, 1, name="line1", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.200, x_ohm_per_km=0.1159876,
max_loading_percent=20)
pp.create_line_from_parameters(net, 1, 3, 1, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.100, x_ohm_per_km=0.1159876,
max_loading_percent=10)
# run OPF
pp.runopp(net, verbose=False, init="flat")
assert net["OPF_converged"]
assert sum(net["_ppc"]["branch"][:, 5] - np.array([ 0.02771281+0.j, 0.00692820+0.j, 0.12800000+0.j])) < 1e-8
# assert and check result
logger.debug("test_opf_sgen_loading")
logger.debug("res_sgen:\n%s" % net.res_sgen)
logger.debug("res_line.loading_percent:\n%s" % net.res_line.loading_percent)
assert net.res_line.loading_percent.at[0] - 20 < 1e-2
logger.debug("res_line.loading_percent:\n%s" % net.res_line.loading_percent)
assert net.res_line.loading_percent.at[1] - 10 < 1e-2
def test_storage_opf():
""" Testing a simple network with storage to ensure the correct behaviour
of the storage OPF functions """
# boundaries
vm_max = 1.1
vm_min = 0.9
max_line_loading_percent = 100
# create network
net = pp.create_empty_network()
b1 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
b2 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
pp.create_line(net, b1, b2, length_km=5, std_type="NAYY 4x50 SE",
max_loading_percent=max_line_loading_percent)
# test elements static
pp.create_ext_grid(net, b2)
pp.create_load(net, b1, p_kw=7.5, controllable=False)
pp.create_sgen(net, b1, p_kw=-25, controllable=True, max_p_kw=-10, min_p_kw=-25,
max_q_kvar=25, min_q_kvar=-25)
# test elements
pp.create_storage(net, b1, p_kw=-25, max_e_kwh=50, controllable=True, max_p_kw=0,
min_p_kw=-25, max_q_kvar=25, min_q_kvar=-25)
pp.create_sgen(net, b1, p_kw=-25, controllable=True, max_p_kw=0, min_p_kw=-25,
max_q_kvar=25, min_q_kvar=-25)
pp.create_load(net, b1, p_kw=25, controllable=True, max_p_kw=25, min_p_kw=0,
max_q_kvar=25, min_q_kvar=-25)
# costs
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([0, -3, 0]))
pp.create_polynomial_cost(net, 0, "sgen", np.array([0, -2, 0]))
pp.create_polynomial_cost(net, 0, "storage", np.array([0, -1, 0]))
pp.create_polynomial_cost(net, 1, "sgen", np.array([0, -1, 0]))
pp.create_polynomial_cost(net, 1, "load", np.array([0, -3, 0]))
# test storage generator behaviour
net["storage"].in_service.iloc[0] = True
net["storage"].p_kw.iloc[0] = -25
net["sgen"].in_service.iloc[1] = False
net["load"].in_service.iloc[1] = False
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
res_stor_p_kw = net["res_storage"].p_kw.iloc[0]
res_stor_q_kvar = net["res_storage"].q_kvar.iloc[0]
res_cost_stor = net["res_cost"]
net["storage"].in_service.iloc[0] = False
net["storage"].p_kw.iloc[0] = -25
net["sgen"].in_service.iloc[1] = True
net["load"].in_service.iloc[1] = False
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
res_sgen_p_kw = net["res_sgen"].p_kw.iloc[1]
res_sgen_q_kvar = net["res_sgen"].q_kvar.iloc[1]
res_cost_sgen = net["res_cost"]
# assert storage generator behaviour
assert np.isclose(res_stor_p_kw, res_sgen_p_kw)
assert np.isclose(res_stor_q_kvar, res_sgen_q_kvar)
assert np.isclose(res_cost_stor, res_cost_sgen)
# test storage load behaviour
net["storage"].in_service.iloc[0] = True
net["storage"].p_kw.iloc[0] = 25
net["storage"].max_p_kw.iloc[0] = 25
net["storage"].min_p_kw.iloc[0] = 0
net["storage"].max_q_kvar.iloc[0] = 25
net["storage"].min_q_kvar.iloc[0] = -25
# gencost for storages: positive costs in pandapower per definition
# --> storage gencosts are similar to sgen gencosts (make_objective.py, l.128ff. and l.185ff.)
net["polynomial_cost"].c.iloc[2] = net["polynomial_cost"].c.iloc[4]
net["sgen"].in_service.iloc[1] = False
net["load"].in_service.iloc[1] = False
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
res_stor_p_kw = net["res_storage"].p_kw.iloc[0]
res_stor_q_kvar = net["res_storage"].q_kvar.iloc[0]
res_cost_stor = net["res_cost"]
net["storage"].in_service.iloc[0] = False
net["storage"].p_kw.iloc[0] = 25
net["sgen"].in_service.iloc[1] = False
net["load"].in_service.iloc[1] = True
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
res_load_p_kw = net["res_load"].p_kw.iloc[1]
res_load_q_kvar = net["res_load"].q_kvar.iloc[1]
res_cost_load = net["res_cost"]
# assert storage load behaviour
assert np.isclose(res_stor_p_kw, res_load_p_kw)
assert np.isclose(res_stor_q_kvar, res_load_q_kvar)
assert np.isclose(res_cost_stor, res_cost_load)
def test_in_service_controllables():
""" Testing controllable but out of service elements behaviour """
# boundaries
vm_max = 1.1
vm_min = 0.9
max_line_loading_percent = 100
# create network
net = pp.create_empty_network()
b1 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
b2 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
pp.create_line(net, b1, b2, length_km=5, std_type="NAYY 4x50 SE",
max_loading_percent=max_line_loading_percent)
# test elements static
pp.create_ext_grid(net, b2)
pp.create_load(net, b1, p_kw=7.5, controllable=True, max_p_kw=10, min_p_kw=0,
max_q_kvar=2.5, min_q_kvar=-2.5)
pp.create_sgen(net, b1, p_kw=-25, controllable=True, max_p_kw=-10, min_p_kw=-25,
max_q_kvar=25, min_q_kvar=-25)
# test elements
pp.create_sgen(net, b1, p_kw=-25, controllable=True, max_p_kw=0, min_p_kw=-25,
max_q_kvar=25, min_q_kvar=-25)
pp.create_load(net, b1, p_kw=2.5, controllable=True, max_p_kw=2.5, min_p_kw=0,
max_q_kvar=2.5, min_q_kvar=-2.5)
# costs
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([0, 3, 0]))
pp.create_polynomial_cost(net, 0, "load", np.array([0, -1, 0]))
pp.create_polynomial_cost(net, 0, "sgen", np.array([0, 2, 0]))
pp.create_polynomial_cost(net, 1, "sgen", np.array([0, 1, 0]))
pp.create_polynomial_cost(net, 1, "load", np.array([0, -1, 0]))
net["sgen"].in_service.iloc[1] = False
net["load"].in_service.iloc[1] = False
pp.runopp(net, verbose=True)
assert net["OPF_converged"]
def test_no_controllables(simple_opf_test_net):
# what is the problem with this case and how do we catch it?
net = simple_opf_test_net
net.gen.controllable = False
# pp.runopp(net)
# net.gen = net.gen.drop(index=0)
pp.create_polynomial_cost(net, 0, "gen", np.array([0, -2, 0]))
pp.create_polynomial_cost(net, 0, "load", np.array([0, 1, 0]))
pp.runopp(net)
# def test_controllables_default():
# """ Testing sgens/gens/loads with no defined controllable parameter """
# # boundaries
# vm_max = 1.1
# #todo
# vm_min = 0.9
# max_line_loading_percent = 100
# # create network
# net = pp.create_empty_network()
# b1 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
# b2 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
#
# pp.create_line(net, b1, b2, length_km=5, std_type="NAYY 4x50 SE",
# max_loading_percent=max_line_loading_percent)
#
# # test elements static
# pp.create_ext_grid(net, b2)
# pp.create_load(net, b1, p_kw=7.5, max_p_kw=10, min_p_kw=0,
# max_q_kvar=2.5, min_q_kvar=-2.5)
# net.load["controllable"] = False
# # load should default to controllable =False
# # pp.create_sgen(net, b1, p_kw=-25, max_p_kw=-10, min_p_kw=-25,
# # max_q_kvar=25, min_q_kvar=-25)
# # # sgen should default to controllable =True
# # pp.create_gen(net, b1, p_kw=-25, max_p_kw=-10, min_p_kw=-25,
# # max_q_kvar=25, min_q_kvar=-25)
# # # gen should default to controllable =True
#
# # costs
# pp.create_polynomial_cost(net, 0, "ext_grid", np.array([0,-3, 0]))
# pp.create_polynomial_cost(net, 0, "load", np.array([0, 1, 0]))
# pp.create_polynomial_cost(net, 0, "sgen", np.array([0, 2, 0]))
# pp.create_polynomial_cost(net, 0, "gen", np.array([0, 2, 0]))
# pp.runopp(net, verbose=True)
# assert net["OPF_converged"]
def test_opf_no_controllables_vs_pf():
""" Comparing the calculation results of PF and OPF in a simple network with non-controllable
elements """
# boundaries
vm_max = 1.3
vm_min = 0.9
max_line_loading_percent = 100
# create network
net = pp.create_empty_network()
b1 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
b2 = pp.create_bus(net, vn_kv=0.4, max_vm_pu=vm_max, min_vm_pu=vm_min)
pp.create_line(net, b1, b2, length_km=5, std_type="NAYY 4x50 SE",
max_loading_percent=max_line_loading_percent)
# test elements static
pp.create_ext_grid(net, b2)
pp.create_load(net, b1, p_kw=7.5, controllable=False)
pp.create_sgen(net, b1, p_kw=-25, controllable=False, max_p_kw=-10, min_p_kw=-25,
max_q_kvar=25, min_q_kvar=-25)
# testing cost assignment (for non-controllable elements - see Gitlab Issue #27)
pp.create_polynomial_cost(net, 0, "ext_grid", np.array([0, 3, 0]))
pp.create_polynomial_cost(net, 0, "load", np.array([0, -3, 0]))
pp.create_polynomial_cost(net, 0, "sgen", np.array([0, 2, 0]))
# do calculations
pp.runopp(net, verbose=True)
assert net["OPF_converged"]
res_opf_line_loading = net.res_line.loading_percent
res_opf_bus_voltages = net.res_bus.vm_pu
pp.runpp(net, verbose=True)
assert net["converged"]
res_pf_line_loading = net.res_line.loading_percent
res_pf_bus_voltages = net.res_bus.vm_pu
# assert calculation behaviour
assert np.isclose(res_opf_line_loading, res_pf_line_loading).all()
assert np.isclose(res_opf_bus_voltages, res_pf_bus_voltages).all()
if __name__ == "__main__":
pytest.main(['-s', __file__])
# test_storage_opf()
# test_opf_varying_max_line_loading()
#pytest.main(['-s', __file__])
#test_storage_opf()
# test_opf_no_controllables_vs_pf()
#test_opf_varying_max_line_loading()
# pytest.main(["test_basic.py", "-s"])
# test_simplest_dispatch()
# test_trafo3w_loading()
# test_dcopf_pwl()
# net = simple_opf_test_net()
# test_no_controllables(net)
# test_controllables_default()
| 42.04551
| 116
| 0.635959
| 5,725
| 34,183
| 3.482969
| 0.055371
| 0.069408
| 0.028285
| 0.021214
| 0.877332
| 0.851003
| 0.836309
| 0.826078
| 0.81008
| 0.78992
| 0
| 0.059288
| 0.233713
| 34,183
| 813
| 117
| 42.04551
| 0.701955
| 0.143551
| 0
| 0.729367
| 0
| 0
| 0.070269
| 0.015481
| 0
| 0
| 0
| 0.00123
| 0.119002
| 1
| 0.038388
| false
| 0
| 0.015355
| 0
| 0.055662
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
120090acdc01c506da9760792ff51bd01b7bc2dc
| 48,095
|
py
|
Python
|
mlni/classification.py
|
AbdulkadirA/mlni
|
f58d53cd70d700289063ce3ca4ad475607806729
|
[
"MIT"
] | null | null | null |
mlni/classification.py
|
AbdulkadirA/mlni
|
f58d53cd70d700289063ce3ca4ad475607806729
|
[
"MIT"
] | null | null | null |
mlni/classification.py
|
AbdulkadirA/mlni
|
f58d53cd70d700289063ce3ca4ad475607806729
|
[
"MIT"
] | null | null | null |
from mlni.base import WorkFlow, ClassificationAlgorithm, ClassificationValidation
import numpy as np
import pandas as pd
import os, json
from sklearn.svm import SVC, SVR
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import StratifiedKFold, StratifiedShuffleSplit
from multiprocessing.pool import ThreadPool
from mlni.utils import evaluate_prediction, gram_matrix_linear, time_bar
from sklearn.ensemble import RandomForestClassifier
from sklearn.decomposition import PCA
from sklearn.feature_selection import f_classif, RFE, SelectPercentile, SelectFromModel
__author__ = "Junhao Wen"
__copyright__ = "Copyright 2019-2020 The CBICA & SBIA Lab"
__credits__ = ["Junhao Wen, Jorge Samper-González"]
__license__ = "See LICENSE file"
__version__ = "0.1.0"
__maintainer__ = "Junhao Wen"
__email__ = "junhao.wen89@gmail.com"
__status__ = "Development"
class RB_RepeatedHoldOut_DualSVM_Classification(WorkFlow):
"""
The main class to run classification with repeated holdout CV.
"""
def __init__(self, input, split_index, output_dir, n_threads=8, n_iterations=100, test_size=0.2,
grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), kernel=None, verbose=False):
self._input = input
self._split_index = split_index
self._output_dir = output_dir
self._n_threads = n_threads
self._n_iterations = n_iterations
self._grid_search_folds = grid_search_folds
self._balanced = balanced
self._c_range = c_range
self._verbose = verbose
self._test_size = test_size
self._validation = None
self._algorithm = None
self._kernel = kernel
def run(self):
if self._kernel is None:
kernel = self._input.get_kernel()
else:
kernel = self._kernel
x = self._input.get_x()
y = self._input.get_y()
if self._verbose:
if y[0] == 0:
print('For classification, the negative coefficients in the weight map are more likely to be classified as the first label in the diagnosis tsv')
else:
print('For classification, the positive coefficients in the weight map are more likely to be classified as the second label in the diagnosis tsv')
self._algorithm = LinearSVMAlgorithmWithPrecomputedKernel(kernel,
y,
balanced=self._balanced,
grid_search_folds=self._grid_search_folds,
c_range=self._c_range,
n_threads=self._n_threads, verbose=self._verbose)
self._validation = RepeatedHoldOut(self._algorithm, n_iterations=self._n_iterations, test_size=self._test_size)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads,
splits_indices=self._split_index, verbose=self._verbose)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not os.path.exists(classifier_dir):
os.makedirs(classifier_dir)
self._algorithm.save_classifier(classifier, classifier_dir)
self._algorithm.save_parameters(best_params, classifier_dir)
self._algorithm.save_weights(classifier, x, classifier_dir)
self._validation.save_results(self._output_dir)
class RB_RepeatedHoldOut_DualSVM_Classification_Nested_Feature_Selection(WorkFlow):
"""
The main class to run classification with repeated holdout CV using ROI features and nested feature selection.
"""
def __init__(self, input, split_index, output_dir, n_threads=8, n_iterations=100, test_size=0.2, grid_search_folds=10,
balanced=True, c_range=np.logspace(-6, 2, 17), feature_selection_method='RFE', top_k=50, verbose=False):
self._input = input
self._split_index = split_index
self._output_dir = output_dir
self._n_threads = n_threads
self._n_iterations = n_iterations
self._grid_search_folds = grid_search_folds
self._balanced = balanced
self._c_range = c_range
self._verbose = verbose
self._test_size = test_size
self._validation = None
self._algorithm = None
self._feature_selection_method = feature_selection_method
self._top_k = top_k
def run(self):
x = self._input.get_x()
y = self._input.get_y()
if self._verbose:
if y[0] == 0:
print('For classification, the negative coefficients in the weight map are more likely to be classified as the first label in the diagnosis tsv')
else:
print('For classification, the positive coefficients in the weight map are more likely to be classified as the second label in the diagnosis tsv')
self._algorithm = LinearSVMAlgorithmWithPrecomputedKernelNestedFeatureSelection(x, y, self._feature_selection_method,
self._top_k, balanced=self._balanced, grid_search_folds=self._grid_search_folds, c_range=self._c_range,
n_threads=self._n_threads, verbose=self._verbose)
self._validation = RepeatedHoldOut(self._algorithm, n_iterations=self._n_iterations, test_size=self._test_size)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads,
splits_indices=self._split_index, verbose=self._verbose)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not os.path.exists(classifier_dir):
os.makedirs(classifier_dir)
self._algorithm.save_classifier(classifier, classifier_dir)
self._algorithm.save_parameters(best_params, classifier_dir)
self._algorithm.save_weights(classifier, x, classifier_dir)
self._validation.save_results(self._output_dir)
class VB_RepeatedHoldOut_DualSVM_Classification(WorkFlow):
"""
The main class to run MLNI with repeated holdout CV for classification with voxel-wise features.
"""
def __init__(self, input, split_index, output_dir, n_threads=8, n_iterations=100, test_size=0.2,
grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), verbose=False):
self._input = input
self._split_index = split_index
self._output_dir = output_dir
self._n_threads = n_threads
self._n_iterations = n_iterations
self._grid_search_folds = grid_search_folds
self._balanced = balanced
self._c_range = c_range
self._verbose = verbose
self._test_size = test_size
self._validation = None
self._algorithm = None
def run(self):
x = self._input.get_x()
y = self._input.get_y()
kernel = self._input.get_kernel()
## for the voxel-wise approach, a precomputed kernel is used to speed up the algorithm
self._algorithm = LinearSVMAlgorithmWithPrecomputedKernel(kernel,
y,
balanced=self._balanced,
grid_search_folds=self._grid_search_folds,
c_range=self._c_range,
n_threads=self._n_threads, verbose=self._verbose)
self._validation = RepeatedHoldOut(self._algorithm, n_iterations=self._n_iterations, test_size=self._test_size)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads,
splits_indices=self._split_index, verbose=self._verbose)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not os.path.exists(classifier_dir):
os.makedirs(classifier_dir)
self._algorithm.save_classifier(classifier, classifier_dir)
self._algorithm.save_parameters(best_params, classifier_dir)
weights = self._algorithm.save_weights(classifier, x, classifier_dir)
self._validation.save_results(self._output_dir)
self._input.save_weights_as_nifti(weights, classifier_dir)
class VB_RepeatedHoldOut_DualSVM_Classification_Nested_Feature_Selection(WorkFlow):
"""
The main class to run MLNI with repeated holdout CV for classification with voxel-wise features and nested feature selection.
"""
def __init__(self, input, split_index, output_dir, n_threads=8, n_iterations=100, test_size=0.2, grid_search_folds=10,
balanced=True, c_range=np.logspace(-6, 2, 17), feature_selection_method='RFE', top_k=50, verbose=False ):
self._input = input
self._split_index = split_index
self._output_dir = output_dir
self._n_threads = n_threads
self._n_iterations = n_iterations
self._grid_search_folds = grid_search_folds
self._balanced = balanced
self._c_range = c_range
self._verbose = verbose
self._test_size = test_size
self._validation = None
self._algorithm = None
self._feature_selection_method = feature_selection_method
self._top_k = top_k
def run(self):
x = self._input.get_x()
y = self._input.get_y()
## for the voxel-wise approach, a precomputed kernel is used to speed up the algorithm
self._algorithm = LinearSVMAlgorithmWithPrecomputedKernelNestedFeatureSelection(x, y, self._feature_selection_method, self._top_k,
balanced=self._balanced, grid_search_folds=self._grid_search_folds, c_range=self._c_range,
n_threads=self._n_threads, verbose=self._verbose)
self._validation = RepeatedHoldOut(self._algorithm, n_iterations=self._n_iterations, test_size=self._test_size)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads, splits_indices=self._split_index,
verbose=self._verbose)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not os.path.exists(classifier_dir):
os.makedirs(classifier_dir)
self._algorithm.save_classifier(classifier, classifier_dir)
self._algorithm.save_parameters(best_params, classifier_dir)
weights = self._algorithm.save_weights(classifier, x, classifier_dir)
self._validation.save_results(self._output_dir)
class RB_KFold_DualSVM_Classification(WorkFlow):
"""
The main class to run MLNI with stratified k-fold CV for classification with ROI features.
"""
def __init__(self, input, split_index, output_dir, n_folds, n_threads=8, grid_search_folds=10, balanced=True,
c_range=np.logspace(-6, 2, 17), kernel=None, verbose=False):
self._input = input
self._split_index = split_index
self._output_dir = output_dir
self._n_threads = n_threads
self._grid_search_folds = grid_search_folds
self._balanced = balanced
self._c_range = c_range
self._verbose = verbose
self._n_folds = n_folds
self._validation = None
self._algorithm = None
self._kernel = kernel
def run(self):
if self._kernel is None:
kernel = self._input.get_kernel()
else:
kernel = self._kernel
x = self._input.get_x()
y = self._input.get_y()
if self._verbose:
if y[0] == 0:
print('For classification, the negative coefficients in the weight map are more likely to be classified as the first label in the diagnosis tsv')
else:
print('For classification, the positive coefficients in the weight map are more likely to be classified as the second label in the diagnosis tsv')
self._algorithm = LinearSVMAlgorithmWithPrecomputedKernel(kernel, y, balanced=self._balanced,
grid_search_folds=self._grid_search_folds, c_range=self._c_range,
n_threads=self._n_threads, verbose=self._verbose)
self._validation = KFoldCV(self._algorithm)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads,
splits_indices=self._split_index,
n_folds=self._n_folds, verbose=self._verbose)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not os.path.exists(classifier_dir):
os.makedirs(classifier_dir)
self._algorithm.save_classifier(classifier, classifier_dir)
self._algorithm.save_parameters(best_params, classifier_dir)
self._algorithm.save_weights(classifier, x, classifier_dir)
self._validation.save_results(self._output_dir)
class VB_KFold_DualSVM_Classification(WorkFlow):
"""
The main class to run MLNI with stratified k-fold CV for classification with voxel features.
"""
def __init__(self, input, split_index, output_dir, n_folds, n_threads=8, grid_search_folds=10, balanced=True,
c_range=np.logspace(-6, 2, 17), verbose=False):
self._input = input
self._split_index = split_index
self._output_dir = output_dir
self._n_threads = n_threads
self._grid_search_folds = grid_search_folds
self._balanced = balanced
self._c_range = c_range
self._verbose = verbose
self._n_folds = n_folds
self._validation = None
self._algorithm = None
def run(self):
x = self._input.get_x()
y = self._input.get_y()
kernel = self._input.get_kernel()
if self._verbose:
if y[0] == 0:
print('For classification, the negative coefficients in the weight map are more likely to be classified as the first label in the diagnosis tsv')
else:
print('For classification, the positive coefficients in the weight map are more likely to be classified as the second label in the diagnosis tsv')
self._algorithm = LinearSVMAlgorithmWithPrecomputedKernel(kernel, y, balanced=self._balanced,
grid_search_folds=self._grid_search_folds, c_range=self._c_range,
n_threads=self._n_threads, verbose=self._verbose)
self._validation = KFoldCV(self._algorithm)
classifier, best_params, results = self._validation.validate(y, n_threads=self._n_threads,
splits_indices=self._split_index,
n_folds=self._n_folds, verbose=self._verbose)
classifier_dir = os.path.join(self._output_dir, 'classifier')
if not os.path.exists(classifier_dir):
os.makedirs(classifier_dir)
self._algorithm.save_classifier(classifier, classifier_dir)
self._algorithm.save_parameters(best_params, classifier_dir)
weights = self._algorithm.save_weights(classifier, x, classifier_dir)
self._validation.save_results(self._output_dir)
self._input.save_weights_as_nifti(weights, classifier_dir)
class LinearSVMAlgorithmWithPrecomputedKernel(ClassificationAlgorithm):
'''
Dual SVM with a precomputed linear kernel.
'''
def __init__(self, kernel, y, balanced=True, grid_search_folds=10, c_range=np.logspace(-6, 2, 17), n_threads=15,
verbose=False):
self._kernel = kernel
self._y = y
self._balanced = balanced
self._grid_search_folds = grid_search_folds
self._c_range = c_range
self._n_threads = n_threads
self._verbose = verbose
def _launch_svc(self, kernel_train, x_test, y_train, y_test, c):
if self._balanced:
svc = SVC(C=c, kernel='precomputed', probability=True, tol=1e-6, class_weight='balanced')
else:
svc = SVC(C=c, kernel='precomputed', probability=True, tol=1e-6)
svc.fit(kernel_train, y_train)
y_hat_train = svc.predict(kernel_train)
y_hat = svc.predict(x_test)
proba_test_index1 = svc.predict_proba(x_test)[:, 1]
auc = roc_auc_score(y_test, proba_test_index1)
return svc, y_hat, auc, y_hat_train, proba_test_index1
def _grid_search(self, kernel_train, x_test, y_train, y_test, c):
_, y_hat, _, _, _ = self._launch_svc(kernel_train, x_test, y_train, y_test, c)
ba = evaluate_prediction(y_test, y_hat)['balanced_accuracy']
return ba
def _select_best_parameter(self, async_result):
c_values = []
accuracies = []
for fold in async_result.keys():
best_c = -1
best_acc = -1
for c, async_acc in async_result[fold].items():
acc = async_acc.get()
if acc > best_acc:
best_c = c
best_acc = acc
c_values.append(best_c)
accuracies.append(best_acc)
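# aggregate across the inner folds: balanced accuracies are simply averaged,
# while the per-fold best C values are combined via their geometric mean
# (10 ** mean(log10(C))), the natural average on a log-spaced search grid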
best_acc = np.mean(accuracies)
best_c = np.power(10, np.mean(np.log10(c_values)))
return {'c': best_c, 'balanced_accuracy': best_acc}
def evaluate(self, train_index, test_index):
inner_pool = ThreadPool(self._n_threads)
async_result = {}
for i in range(self._grid_search_folds):
async_result[i] = {}
outer_kernel = self._kernel[train_index, :][:, train_index]
y_train = self._y[train_index]
skf = StratifiedKFold(n_splits=self._grid_search_folds, shuffle=True)
inner_cv = list(skf.split(np.zeros(len(y_train)), y_train))
for i in range(len(inner_cv)):
inner_train_index, inner_test_index = inner_cv[i]
inner_kernel = outer_kernel[inner_train_index, :][:, inner_train_index]
x_test_inner = outer_kernel[inner_test_index, :][:, inner_train_index]
y_train_inner, y_test_inner = y_train[inner_train_index], y_train[inner_test_index]
for c in self._c_range:
if self._verbose:
print("Inner CV for C=%f..." % c)
async_result[i][c] = inner_pool.apply_async(self._grid_search, args=(inner_kernel, x_test_inner, y_train_inner,
y_test_inner, c))
inner_pool.close()
inner_pool.join()
best_parameter = self._select_best_parameter(async_result)
x_test = self._kernel[test_index, :][:, train_index]
y_train, y_test = self._y[train_index], self._y[test_index]
_, y_hat, auc, y_hat_train, proba_test_index1 = self._launch_svc(outer_kernel, x_test, y_train, y_test, best_parameter['c'])
result = dict()
result['best_parameter'] = best_parameter
result['evaluation'] = evaluate_prediction(y_test, y_hat)
result['evaluation_train'] = evaluate_prediction(y_train, y_hat_train)
result['y_hat'] = y_hat
result['y_hat_train'] = y_hat_train
result['y'] = y_test
result['y_train'] = y_train
result['y_index'] = test_index
result['x_index'] = train_index
result['auc'] = auc
result['proba_test_index1'] = proba_test_index1
return result
def apply_best_parameters(self, results_list):
best_c_list = []
bal_acc_list = []
for result in results_list:
best_c_list.append(result['best_parameter']['c'])
bal_acc_list.append(result['best_parameter']['balanced_accuracy'])
# 10^(mean of log10 of best Cs of each fold) is selected
best_c = np.power(10, np.mean(np.log10(best_c_list)))
# Mean balanced accuracy
mean_bal_acc = np.mean(bal_acc_list)
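# refit a single SVC on the full precomputed kernel with the aggregated C;
# this is the final classifier that is saved to disk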
if self._balanced:
svc = SVC(C=best_c, kernel='precomputed', probability=True, tol=1e-6, class_weight='balanced')
else:
svc = SVC(C=best_c, kernel='precomputed', probability=True, tol=1e-6)
svc.fit(self._kernel, self._y)
return svc, {'c': best_c, 'balanced_accuracy': mean_bal_acc}
def save_classifier(self, classifier, output_dir):
np.savetxt(os.path.join(output_dir, 'dual_coefficients.txt'), classifier.dual_coef_)
np.savetxt(os.path.join(output_dir, 'support_vectors_indices.txt'), classifier.support_)
np.savetxt(os.path.join(output_dir, 'intersect.txt'), classifier.intercept_)
def save_weights(self, classifier, x, output_dir):
dual_coefficients = classifier.dual_coef_
sv_indices = classifier.support_
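# for a linear kernel the primal weight vector can be recovered from the dual
# solution as w = sum_i (alpha_i * y_i) * x_i over the support vectors;
# sklearn's dual_coef_ already holds the signed products alpha_i * y_i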
weighted_sv = dual_coefficients.transpose() * x[sv_indices]
weights = np.sum(weighted_sv, 0)
np.savetxt(os.path.join(output_dir, 'weights.txt'), weights)
return weights
def save_parameters(self, parameters_dict, output_dir):
with open(os.path.join(output_dir, 'best_parameters.json'), 'w') as f:
json.dump(parameters_dict, f)
class LinearSVMAlgorithmWithPrecomputedKernelNestedFeatureSelection(ClassificationAlgorithm):
'''
Dual SVM with precomputed linear kernel for nested feature selection.
'''
def __init__(self, x, y, feature_selection_method, top_k, balanced=True, grid_search_folds=10, c_range=np.logspace(-6, 2, 17), n_threads=15,
verbose=False):
self._x = x
self._y = y
self._feature_selection_method = feature_selection_method
self._top_k = top_k
self._balanced = balanced
self._grid_search_folds = grid_search_folds
self._c_range = c_range
self._n_threads = n_threads
self._verbose = verbose
def _launch_svc(self, kernel_train, x_test, y_train, y_test, c):
if self._balanced:
svc = SVC(C=c, kernel='precomputed', probability=True, tol=1e-6, class_weight='balanced')
else:
svc = SVC(C=c, kernel='precomputed', probability=True, tol=1e-6)
svc.fit(kernel_train, y_train)
y_hat_train = svc.predict(kernel_train)
y_hat = svc.predict(x_test)
proba_test_index1 = svc.predict_proba(x_test)[:, 1]
auc = roc_auc_score(y_test, proba_test_index1)
return svc, y_hat, auc, y_hat_train, proba_test_index1
def _grid_search(self, kernel_train, x_test, y_train, y_test, c):
_, y_hat, _, _, _ = self._launch_svc(kernel_train, x_test, y_train, y_test, c)
ba = evaluate_prediction(y_test, y_hat)['balanced_accuracy']
return ba
def _select_best_parameter(self, async_result):
c_values = []
accuracies = []
for fold in async_result.keys():
best_c = -1
best_acc = -1
for c, async_acc in async_result[fold].items():
acc = async_acc.get()
if acc > best_acc:
best_c = c
best_acc = acc
c_values.append(best_c)
accuracies.append(best_acc)
best_acc = np.mean(accuracies)
best_c = np.power(10, np.mean(np.log10(c_values)))
return {'c': best_c, 'balanced_accuracy': best_acc}
def evaluate(self, train_index, test_index):
inner_pool = ThreadPool(self._n_threads)
async_result = {}
for i in range(self._grid_search_folds):
async_result[i] = {}
if self._feature_selection_method == 'ANOVA':
selector = SelectPercentile(f_classif, percentile=self._top_k)
selector.fit(self._x[train_index], self._y[train_index])
x_after = selector.transform(self._x)
elif self._feature_selection_method == 'RF':
clf = RandomForestClassifier(n_estimators=250, random_state=0, n_jobs=-1)
clf.fit(self._x[train_index], self._y[train_index])
selector = SelectFromModel(clf, threshold=self._top_k)
selector.fit(self._x[train_index], self._y[train_index])
x_after = selector.transform(self._x)
elif self._feature_selection_method == 'PCA':
selector = PCA(n_components=self._top_k)
selector.fit(self._x[train_index])
x_after = selector.transform(self._x)
elif self._feature_selection_method == 'RFE':
svc = SVR(kernel="linear")
selector = RFE(estimator=svc, n_features_to_select=int(0.01 * self._top_k * self._x[train_index].shape[1]), step=0.5)
selector.fit(self._x[train_index], self._y[train_index])
x_after = selector.transform(self._x)
else:
raise ValueError("Unknown feature selection method: %s" % self._feature_selection_method)
self._kernel = gram_matrix_linear(x_after)
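# the linear Gram matrix is recomputed on the selected features inside each
# outer split, so the feature selection stays properly nested in the CV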
outer_kernel = self._kernel[train_index, :][:, train_index]
y_train = self._y[train_index]
skf = StratifiedKFold(n_splits=self._grid_search_folds, shuffle=True)
inner_cv = list(skf.split(np.zeros(len(y_train)), y_train))
for i in range(len(inner_cv)):
inner_train_index, inner_test_index = inner_cv[i]
inner_kernel = outer_kernel[inner_train_index, :][:, inner_train_index]
x_test_inner = outer_kernel[inner_test_index, :][:, inner_train_index]
y_train_inner, y_test_inner = y_train[inner_train_index], y_train[inner_test_index]
for c in self._c_range:
if self._verbose:
print("Inner CV for C=%f..." % c)
async_result[i][c] = inner_pool.apply_async(self._grid_search, args=(inner_kernel, x_test_inner, y_train_inner,
y_test_inner, c))
inner_pool.close()
inner_pool.join()
best_parameter = self._select_best_parameter(async_result)
x_test = self._kernel[test_index, :][:, train_index]
y_train, y_test = self._y[train_index], self._y[test_index]
_, y_hat, auc, y_hat_train, proba_test_index1 = self._launch_svc(outer_kernel, x_test, y_train, y_test, best_parameter['c'])
result = dict()
result['best_parameter'] = best_parameter
result['evaluation'] = evaluate_prediction(y_test, y_hat)
result['evaluation_train'] = evaluate_prediction(y_train, y_hat_train)
result['y_hat'] = y_hat
result['y_hat_train'] = y_hat_train
result['y'] = y_test
result['y_train'] = y_train
result['y_index'] = test_index
result['x_index'] = train_index
result['auc'] = auc
result['proba_test_index1'] = proba_test_index1
return result
def apply_best_parameters(self, results_list):
best_c_list = []
bal_acc_list = []
for result in results_list:
best_c_list.append(result['best_parameter']['c'])
bal_acc_list.append(result['best_parameter']['balanced_accuracy'])
# 10^(mean of log10 of best Cs of each fold) is selected
best_c = np.power(10, np.mean(np.log10(best_c_list)))
# Mean balanced accuracy
mean_bal_acc = np.mean(bal_acc_list)
if self._balanced:
svc = SVC(C=best_c, kernel='precomputed', probability=True, tol=1e-6, class_weight='balanced')
else:
svc = SVC(C=best_c, kernel='precomputed', probability=True, tol=1e-6)
svc.fit(self._kernel, self._y)
return svc, {'c': best_c, 'balanced_accuracy': mean_bal_acc}
def save_classifier(self, classifier, output_dir):
np.savetxt(os.path.join(output_dir, 'dual_coefficients.txt'), classifier.dual_coef_)
np.savetxt(os.path.join(output_dir, 'support_vectors_indices.txt'), classifier.support_)
np.savetxt(os.path.join(output_dir, 'intersect.txt'), classifier.intercept_)
def save_weights(self, classifier, x, output_dir):
dual_coefficients = classifier.dual_coef_
sv_indices = classifier.support_
weighted_sv = dual_coefficients.transpose() * x[sv_indices]
weights = np.sum(weighted_sv, 0)
np.savetxt(os.path.join(output_dir, 'weights.txt'), weights)
return weights
def save_parameters(self, parameters_dict, output_dir):
with open(os.path.join(output_dir, 'best_parameters.json'), 'w') as f:
json.dump(parameters_dict, f)
class KFoldCV(ClassificationValidation):
"""
KFold CV.
"""
def __init__(self, ml_algorithm):
self._ml_algorithm = ml_algorithm
self._fold_results = []
self._classifier = None
self._best_params = None
self._cv = None
def validate(self, y, n_folds=10, n_threads=15, splits_indices=None, verbose=False):
if splits_indices is None:
skf = StratifiedKFold(n_splits=n_folds, shuffle=True)
self._cv = list(skf.split(np.zeros(len(y)), y))
else:
self._cv = splits_indices
async_pool = ThreadPool(n_threads)
async_result = {}
for i in range(n_folds):
time_bar(i, n_folds)
print()
if verbose:
print("Repetition %d of CV..." % i)
train_index, test_index = self._cv[i]
async_result[i] = async_pool.apply_async(self._ml_algorithm.evaluate, args=(train_index, test_index))
async_pool.close()
async_pool.join()
for i in range(n_folds):
self._fold_results.append(async_result[i].get())
## refit a final classifier using the parameters aggregated across all folds
self._classifier, self._best_params = self._ml_algorithm.apply_best_parameters(self._fold_results)
return self._classifier, self._best_params, self._fold_results
def save_results(self, output_dir):
if self._fold_results is None:
raise Exception("No results to save. Method validate() must be run before save_results().")
subjects_folds = []
results_folds = []
container_dir = os.path.join(output_dir, 'folds')
if not os.path.exists(container_dir):
os.makedirs(container_dir)
for i in range(len(self._fold_results)):
subjects_df = pd.DataFrame({'y': self._fold_results[i]['y'],
'y_hat': self._fold_results[i]['y_hat'],
'y_index': self._fold_results[i]['y_index']})
subjects_df.to_csv(os.path.join(container_dir, 'subjects_fold-' + str(i) + '.tsv'),
index=False, sep='\t', encoding='utf-8')
subjects_folds.append(subjects_df)
results_df = pd.DataFrame({'balanced_accuracy': self._fold_results[i]['evaluation']['balanced_accuracy'],
'auc': self._fold_results[i]['auc'],
'accuracy': self._fold_results[i]['evaluation']['accuracy'],
'sensitivity': self._fold_results[i]['evaluation']['sensitivity'],
'specificity': self._fold_results[i]['evaluation']['specificity'],
'ppv': self._fold_results[i]['evaluation']['ppv'],
'npv': self._fold_results[i]['evaluation']['npv']}, index=['i', ])
results_df.to_csv(os.path.join(container_dir, 'results_fold-' + str(i) + '.tsv'),
index=False, sep='\t', encoding='utf-8')
results_folds.append(results_df)
all_subjects = pd.concat(subjects_folds)
all_subjects.to_csv(os.path.join(output_dir, 'subjects.tsv'),
index=False, sep='\t', encoding='utf-8')
all_results = pd.concat(results_folds)
all_results.to_csv(os.path.join(output_dir, 'results.tsv'),
index=False, sep='\t', encoding='utf-8')
mean_results = pd.DataFrame(all_results.apply(np.nanmean).to_dict(), columns=all_results.columns, index=[0, ])
mean_results.to_csv(os.path.join(output_dir, 'mean_results.tsv'),
index=False, sep='\t', encoding='utf-8')
print("Mean results of the classification:")
print("Balanced accuracy: %s" %(mean_results['balanced_accuracy'].to_string(index = False)))
print("specificity: %s" % (mean_results['specificity'].to_string(index=False)))
print("sensitivity: %s" % (mean_results['sensitivity'].to_string(index=False)))
print("auc: %s" % (mean_results['auc'].to_string(index=False)))
class RepeatedHoldOut(ClassificationValidation):
"""
Repeated holdout splits CV.
"""
def __init__(self, ml_algorithm, n_iterations=100, test_size=0.3):
self._ml_algorithm = ml_algorithm
self._split_results = []
self._classifier = None
self._best_params = None
self._cv = None
self._n_iterations = n_iterations
self._test_size = test_size
self._error_resampled_t = None
self._error_corrected_resampled_t = None
self._bal_accuracy_resampled_t = None
self._bal_accuracy_corrected_resampled_t = None
def validate(self, y, n_threads=15, splits_indices=None, inner_cv=True, verbose=False):
if splits_indices is None:
splits = StratifiedShuffleSplit(n_splits=self._n_iterations, test_size=self._test_size)
self._cv = list(splits.split(np.zeros(len(y)), y))
else:
self._cv = splits_indices
async_pool = ThreadPool(n_threads)
async_result = {}
for i in range(self._n_iterations):
time_bar(i, self._n_iterations)
print()
if verbose:
print("Repetition %d of CV..." % i)
train_index, test_index = self._cv[i]
if inner_cv:
async_result[i] = async_pool.apply_async(self._ml_algorithm.evaluate, args=(train_index, test_index))
else:
raise Exception("We always do nested CV")
async_pool.close()
async_pool.join()
for i in range(self._n_iterations):
self._split_results.append(async_result[i].get())
self._classifier, self._best_params = self._ml_algorithm.apply_best_parameters(self._split_results)
return self._classifier, self._best_params, self._split_results
def save_results(self, output_dir):
if self._split_results is None:
raise Exception("No results to save. Method validate() must be run before save_results().")
all_results_list = []
all_train_subjects_list = []
all_test_subjects_list = []
for iteration in range(len(self._split_results)):
iteration_dir = os.path.join(output_dir, 'iteration-' + str(iteration))
if not os.path.exists(iteration_dir):
os.makedirs(iteration_dir)
iteration_train_subjects_df = pd.DataFrame({'iteration': iteration,
'y': self._split_results[iteration]['y_train'],
'y_hat': self._split_results[iteration]['y_hat_train'],
'subject_index': self._split_results[iteration]['x_index']})
iteration_train_subjects_df.to_csv(os.path.join(iteration_dir, 'train_subjects.tsv'),
index=False, sep='\t', encoding='utf-8')
all_train_subjects_list.append(iteration_train_subjects_df)
iteration_test_subjects_df = pd.DataFrame({'iteration': iteration,
'y': self._split_results[iteration]['y'],
'y_hat': self._split_results[iteration]['y_hat'],
'subject_index': self._split_results[iteration]['y_index'],
'proba_test_index1': self._split_results[iteration]['proba_test_index1']})
iteration_test_subjects_df.to_csv(os.path.join(iteration_dir, 'test_subjects.tsv'),
index=False, sep='\t', encoding='utf-8')
all_test_subjects_list.append(iteration_test_subjects_df)
iteration_results_df = pd.DataFrame(
{'balanced_accuracy': self._split_results[iteration]['evaluation']['balanced_accuracy'],
'auc': self._split_results[iteration]['auc'],
'accuracy': self._split_results[iteration]['evaluation']['accuracy'],
'sensitivity': self._split_results[iteration]['evaluation']['sensitivity'],
'specificity': self._split_results[iteration]['evaluation']['specificity'],
'ppv': self._split_results[iteration]['evaluation']['ppv'],
'npv': self._split_results[iteration]['evaluation']['npv'],
'train_balanced_accuracy': self._split_results[iteration]['evaluation_train']['balanced_accuracy'],
'train_accuracy': self._split_results[iteration]['evaluation_train']['accuracy'],
'train_sensitivity': self._split_results[iteration]['evaluation_train']['sensitivity'],
'train_specificity': self._split_results[iteration]['evaluation_train']['specificity'],
'train_ppv': self._split_results[iteration]['evaluation_train']['ppv'],
'train_npv': self._split_results[iteration]['evaluation_train']['npv']
}, index=['i', ])
iteration_results_df.to_csv(os.path.join(iteration_dir, 'results.tsv'),
index=False, sep='\t', encoding='utf-8')
all_results_list.append(iteration_results_df)
all_train_subjects_df = pd.concat(all_train_subjects_list)
all_train_subjects_df.to_csv(os.path.join(output_dir, 'train_subjects.tsv'),
index=False, sep='\t', encoding='utf-8')
all_test_subjects_df = pd.concat(all_test_subjects_list)
all_test_subjects_df.to_csv(os.path.join(output_dir, 'test_subjects.tsv'),
index=False, sep='\t', encoding='utf-8')
all_results_df = pd.concat(all_results_list)
all_results_df.to_csv(os.path.join(output_dir, 'results.tsv'),
index=False, sep='\t', encoding='utf-8')
mean_results_df = pd.DataFrame(all_results_df.apply(np.nanmean).to_dict(),
columns=all_results_df.columns, index=[0, ])
mean_results_df.to_csv(os.path.join(output_dir, 'mean_results.tsv'),
index=False, sep='\t', encoding='utf-8')
print("Mean results of the classification:")
print("Balanced accuracy: %s" % (mean_results_df['balanced_accuracy'].to_string(index=False)))
print("specificity: %s" % (mean_results_df['specificity'].to_string(index=False)))
print("sensitivity: %s" % (mean_results_df['sensitivity'].to_string(index=False)))
print("auc: %s" % (mean_results_df['auc'].to_string(index=False)))
self.compute_error_variance()
self.compute_accuracy_variance()
variance_df = pd.DataFrame({'bal_accuracy_resampled_t': self._bal_accuracy_resampled_t,
'bal_accuracy_corrected_resampled_t': self._bal_accuracy_corrected_resampled_t,
'error_resampled_t': self._error_resampled_t,
'error_corrected_resampled_t': self._error_corrected_resampled_t}, index=[0, ])
variance_df.to_csv(os.path.join(output_dir, 'variance.tsv'),
index=False, sep='\t', encoding='utf-8')
def _compute_variance(self, test_error_split):
# compute average test error
num_split = len(self._split_results) # J in the paper
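# Nadeau & Bengio (2003): the naive "resampled t" estimate divides the
# empirical variance by J, whereas the corrected version scales it by
# (1/J + n_test/n_train) to account for the overlap between training sets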
# compute mu_{n_1}^{n_2}
average_test_error = np.mean(test_error_split)
approx_variance = np.sum((test_error_split - average_test_error)**2)/(num_split - 1)
# compute variance (point 2 and 6 of Nadeau's paper)
resampled_t = approx_variance / num_split
corrected_resampled_t = (1/num_split + self._test_size/(1 - self._test_size)) * approx_variance
return resampled_t, corrected_resampled_t
def compute_error_variance(self):
num_split = len(self._split_results)
test_error_split = np.zeros((num_split, 1)) # this list will contain the list of mu_j hat for j = 1 to J
for i in range(num_split):
test_error_split[i] = self._compute_average_test_error(self._split_results[i]['y'],
self._split_results[i]['y_hat'])
self._error_resampled_t, self._error_corrected_resampled_t = self._compute_variance(test_error_split)
return self._error_resampled_t, self._error_corrected_resampled_t
def _compute_average_test_error(self, y_list, yhat_list):
# return the average test error (denoted mu_j hat)
return float(len(np.where(y_list != yhat_list)[0]))/float(len(y_list))
def compute_accuracy_variance(self):
num_split = len(self._split_results)
test_accuracy_split = np.zeros((num_split, 1)) # this list will contain the list of mu_j hat for j = 1 to J
for i in range(num_split):
test_accuracy_split[i] = self._compute_average_test_accuracy(self._split_results[i]['y'],
self._split_results[i]['y_hat'])
self._bal_accuracy_resampled_t, self._bal_accuracy_corrected_resampled_t = self._compute_variance(test_accuracy_split)
return self._bal_accuracy_resampled_t, self._bal_accuracy_corrected_resampled_t
def _compute_average_test_accuracy(self, y_list, yhat_list):
return evaluate_prediction(y_list, yhat_list)['balanced_accuracy']
class LinearSVMAlgorithmWithoutPrecomputedKernel(ClassificationAlgorithm):
'''
Linear SVM trained directly on the input X (no precomputed kernel), for regional features.
'''
def __init__(self, x, y, balanced=True, grid_search_folds=10, c_range=np.logspace(-6, 2, 17), n_threads=15,
verbose=False):
self._x = x
self._y = y
self._balanced = balanced
self._grid_search_folds = grid_search_folds
self._c_range = c_range
self._n_threads = n_threads
self._verbose = verbose
def _launch_svc(self, x_train, x_test, y_train, y_test, c):
if self._balanced:
svc = SVC(C=c, probability=True, tol=1e-6, class_weight='balanced', kernel='linear')
else:
svc = SVC(C=c, probability=True, tol=1e-6, kernel='linear')
svc.fit(x_train, y_train)
y_hat_train = svc.predict(x_train)
y_hat = svc.predict(x_test)
proba_test = svc.predict_proba(x_test)[:, 1]
auc = roc_auc_score(y_test, proba_test)
return svc, y_hat, auc, y_hat_train
def _grid_search(self, x_train, x_test, y_train, y_test, c):
_, y_hat, _, _ = self._launch_svc(x_train, x_test, y_train, y_test, c)
ba = evaluate_prediction(y_test, y_hat)['balanced_accuracy']
return ba
def _select_best_parameter(self, async_result):
c_values = []
accuracies = []
for fold in async_result.keys():
best_c = -1
best_acc = -1
for c, async_acc in async_result[fold].items():
acc = async_acc.get()
if acc > best_acc:
best_c = c
best_acc = acc
c_values.append(best_c)
accuracies.append(best_acc)
best_acc = np.mean(accuracies)
best_c = np.power(10, np.mean(np.log10(c_values)))
return {'c': best_c, 'balanced_accuracy': best_acc}
def evaluate(self, train_index, test_index):
inner_pool = ThreadPool(self._n_threads)
async_result = {}
for i in range(self._grid_search_folds):
async_result[i] = {}
outer_x = self._x[train_index, :]
y_train = self._y[train_index]
skf = StratifiedKFold(n_splits=self._grid_search_folds, shuffle=True)
inner_cv = list(skf.split(np.zeros(len(y_train)), y_train))
for i in range(len(inner_cv)):
inner_train_index, inner_test_index = inner_cv[i]
inner_x = outer_x[inner_train_index, :]
x_test_inner = outer_x[inner_test_index, :]
y_train_inner, y_test_inner = y_train[inner_train_index], y_train[inner_test_index]
for c in self._c_range:
if self._verbose:
print("Inner CV for C=%f..." % c)
async_result[i][c] = inner_pool.apply_async(self._grid_search, args=(inner_x, x_test_inner, y_train_inner,
y_test_inner, c))
inner_pool.close()
inner_pool.join()
best_parameter = self._select_best_parameter(async_result)
x_test = self._x[test_index, :]
y_train, y_test = self._y[train_index], self._y[test_index]
_, y_hat, auc, y_hat_train = self._launch_svc(outer_x, x_test, y_train, y_test, best_parameter['c'])
result = dict()
result['best_parameter'] = best_parameter
result['evaluation'] = evaluate_prediction(y_test, y_hat)
result['evaluation_train'] = evaluate_prediction(y_train, y_hat_train)
result['y_hat'] = y_hat
result['y_hat_train'] = y_hat_train
result['y'] = y_test
result['y_train'] = y_train
result['y_index'] = test_index
result['x_index'] = train_index
result['auc'] = auc
return result
def apply_best_parameters(self, results_list):
best_c_list = []
bal_acc_list = []
for result in results_list:
best_c_list.append(result['best_parameter']['c'])
bal_acc_list.append(result['best_parameter']['balanced_accuracy'])
# 10^(mean of log10 of best Cs of each fold) is selected
best_c = np.power(10, np.mean(np.log10(best_c_list)))
# Mean balanced accuracy
mean_bal_acc = np.mean(bal_acc_list)
if self._balanced:
svc = SVC(C=best_c, probability=True, tol=1e-6, class_weight='balanced', kernel='linear')
else:
svc = SVC(C=best_c, probability=True, tol=1e-6, kernel='linear')
svc.fit(self._x, self._y)
return svc, {'c': best_c, 'balanced_accuracy': mean_bal_acc}
def save_classifier(self, classifier, output_dir):
np.savetxt(os.path.join(output_dir, 'dual_coefficients.txt'), classifier.dual_coef_)
np.savetxt(os.path.join(output_dir, 'support_vectors_indices.txt'), classifier.support_)
np.savetxt(os.path.join(output_dir, 'intersect.txt'), classifier.intercept_)
def save_weights(self, classifier, x, output_dir):
dual_coefficients = classifier.dual_coef_
sv_indices = classifier.support_
weighted_sv = dual_coefficients.transpose() * x[sv_indices]
weights = np.sum(weighted_sv, 0)
np.savetxt(os.path.join(output_dir, 'weights.txt'), weights)
return weights
def save_parameters(self, parameters_dict, output_dir):
with open(os.path.join(output_dir, 'best_parameters.json'), 'w') as f:
json.dump(parameters_dict, f)
| 45.244591
| 161
| 0.62624
| 5,969
| 48,095
| 4.683029
| 0.053275
| 0.02125
| 0.024148
| 0.01431
| 0.879369
| 0.858727
| 0.815512
| 0.791936
| 0.77133
| 0.749043
| 0
| 0.007103
| 0.274083
| 48,095
| 1,062
| 162
| 45.287194
| 0.793544
| 0.031812
| 0
| 0.736111
| 0
| 0.010101
| 0.08713
| 0.005908
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063131
| false
| 0
| 0.015152
| 0.002525
| 0.123737
| 0.031566
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
122d94703ed2eff6dd8c6666fe8dc298248ce44f
| 7,593
|
py
|
Python
|
sources/image_colorization/mleu_train/models/zhao_models.py
|
tramtran2/prlab_image_colorization
|
3ec7f3ad60d6235c5bb232713f1b3ec5f06f4d67
|
[
"Apache-2.0"
] | null | null | null |
sources/image_colorization/mleu_train/models/zhao_models.py
|
tramtran2/prlab_image_colorization
|
3ec7f3ad60d6235c5bb232713f1b3ec5f06f4d67
|
[
"Apache-2.0"
] | null | null | null |
sources/image_colorization/mleu_train/models/zhao_models.py
|
tramtran2/prlab_image_colorization
|
3ec7f3ad60d6235c5bb232713f1b3ec5f06f4d67
|
[
"Apache-2.0"
] | null | null | null |
from tensorflow.keras.layers import Input, Conv2D, BatchNormalization, UpSampling2D, Concatenate
from tensorflow.keras.models import Model
from tensorflow.keras.regularizers import l2
l2_reg = l2(1e-3)
__all__ = ["zhao_vgg16_normal_build"]
def zhao_vgg16_normal_build(
input_shape = (256, 256, 1), # output default 64
kernel = 3,
n_softencoding_class = 313,
n_segmentation_class = 183,
model_name = "m",
):
input_tensor = Input(shape=input_shape)
x = Conv2D(64, (kernel, kernel), activation='relu', padding='same', name='conv1_1', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(input_tensor)
x = Conv2D(64, (kernel, kernel), activation='relu', padding='same', name='conv1_2', kernel_initializer="he_normal",
kernel_regularizer=l2_reg, strides=(2, 2))(x)
x = BatchNormalization()(x)
x = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv2_1', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x)
x = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv2_2', kernel_initializer="he_normal",
kernel_regularizer=l2_reg,
strides=(2, 2))(x)
x = BatchNormalization()(x)
x = Conv2D(256, (kernel, kernel), activation='relu', padding='same', name='conv3_1',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(256, (kernel, kernel), activation='relu', padding='same', name='conv3_2',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(256, (kernel, kernel), activation='relu', padding='same', name='conv3_3', kernel_initializer="he_normal",
strides=(2, 2))(x)
x = BatchNormalization()(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', name='conv4_1',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', name='conv4_2',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', name='conv4_3',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = BatchNormalization()(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', dilation_rate=2, name='conv5_1',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', dilation_rate=2, name='conv5_2',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', dilation_rate=2, name='conv5_3',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = BatchNormalization()(x)
x1 = UpSampling2D(size=(2, 2))(x)
x1 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv5_1', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x1)
x1 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv5_2', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x1)
x1 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv5_3', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x1)
x1 = BatchNormalization()(x1)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', dilation_rate=2, name='conv6_1',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', dilation_rate=2, name='conv6_2',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(512, (kernel, kernel), activation='relu', padding='same', dilation_rate=2, name='conv6_3',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = BatchNormalization()(x)
x2 = UpSampling2D(size=(2, 2))(x)
x2 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv6_1', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x2)
x2 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv6_2', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x2)
x2 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv6_3', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x2)
x2 = BatchNormalization()(x2)
x = Conv2D(256, (kernel, kernel), activation='relu', padding='same', name='conv7_1',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(256, (kernel, kernel), activation='relu', padding='same', name='conv7_2',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(256, (kernel, kernel), activation='relu', padding='same', name='conv7_3',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = BatchNormalization()(x)
x3 = UpSampling2D(size=(2, 2))(x)
x3 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv7_1', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x3)
x3 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv7_2', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x3)
x3 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='deconv7_3', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x3)
x3 = BatchNormalization()(x3)
x = UpSampling2D(size=(2, 2))(x)
x = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv8_1',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv8_2',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv8_3',
kernel_initializer="he_normal", kernel_regularizer=l2_reg)(x)
x = BatchNormalization()(x)
x4 = Concatenate()([x1, x2, x3])
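# x1, x2 and x3 are decoder branches taken at three network depths and
# upsampled to a common resolution; concatenating them fuses multi-scale
# features before the segmentation head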
x4 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv9_1', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x4)
x4 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv9_2', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x4)
x4 = Conv2D(128, (kernel, kernel), activation='relu', padding='same', name='conv9_3', kernel_initializer="he_normal",
kernel_regularizer=l2_reg)(x4)
x4 = BatchNormalization()(x4)
output_seg = Conv2D(n_segmentation_class, (1, 1), activation='softmax', padding='same', name='segmentation')(x4)
output_softencoding = Conv2D(n_softencoding_class, (1, 1), activation='softmax', padding='same', name='softencoding')(x)
outputs = []
outputs.append(output_softencoding)
outputs.append(output_seg)
model = Model(inputs=input_tensor, outputs=outputs, name=model_name)
return model
# zhao_vgg16_normal_build
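# A minimal usage sketch (hypothetical, not part of the original file): the
# model has two softmax heads, so it would typically be compiled with one
# categorical cross-entropy loss per named output.
#
# model = zhao_vgg16_normal_build(input_shape=(256, 256, 1))
# model.compile(optimizer="adam",
#               loss={"softencoding": "categorical_crossentropy",
#                     "segmentation": "categorical_crossentropy"})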
| 58.407692
| 124
| 0.664823
| 947
| 7,593
| 5.108765
| 0.089757
| 0.081852
| 0.154609
| 0.18272
| 0.842084
| 0.808805
| 0.808805
| 0.808805
| 0.792683
| 0.778421
| 0
| 0.053828
| 0.1828
| 7,593
| 130
| 125
| 58.407692
| 0.725866
| 0.005531
| 0
| 0.354545
| 0
| 0
| 0.119751
| 0.003047
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009091
| false
| 0
| 0.063636
| 0
| 0.081818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
124bbe2b76f85191181a040d8338897ee61ceb01
| 122
|
py
|
Python
|
python/testData/inspections/AddCallSuperRequiredKeywordOnlyParamAfterSingleStarInSuperInitIsMerged_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/AddCallSuperRequiredKeywordOnlyParamAfterSingleStarInSuperInitIsMerged_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/AddCallSuperRequiredKeywordOnlyParamAfterSingleStarInSuperInitIsMerged_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class A:
def __init__(self, *, a):
pass
class B(A):
def __init__(self, a):
super().__init__(a=a)
| 15.25
| 29
| 0.52459
| 18
| 122
| 2.888889
| 0.444444
| 0.153846
| 0.307692
| 0.461538
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.311475
| 122
| 8
| 30
| 15.25
| 0.619048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
d646d65d95678f3711f245e96af782502155bac0
| 9,268
|
py
|
Python
|
tests/versiontest.py
|
dylanShark/pomgen
|
311dc718e4796b0b03de65a318e264df9a44bbde
|
[
"BSD-3-Clause"
] | 32
|
2020-01-17T11:00:33.000Z
|
2022-03-17T02:46:41.000Z
|
tests/versiontest.py
|
dylanShark/pomgen
|
311dc718e4796b0b03de65a318e264df9a44bbde
|
[
"BSD-3-Clause"
] | 19
|
2020-01-08T02:30:29.000Z
|
2021-02-21T08:43:40.000Z
|
tests/versiontest.py
|
dylanShark/pomgen
|
311dc718e4796b0b03de65a318e264df9a44bbde
|
[
"BSD-3-Clause"
] | 10
|
2019-12-29T06:53:56.000Z
|
2021-11-22T15:28:50.000Z
|
"""
Copyright (c) 2018, salesforce.com, inc.
All rights reserved.
SPDX-License-Identifier: BSD-3-Clause
For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
"""
import unittest
from common import version
class VersionTest(unittest.TestCase):
def test_get_release_version__semver_release(self):
self.assertEqual("1.2.3", version.get_release_version("1.2.3-SNAPSHOT"))
self.assertEqual("1.2.3", version.get_release_version("1.2.3"))
def test_get_release_version__incremental_release(self):
self.assertEqual("1.2.3-rel1", version.get_release_version("foo", last_released_version="1.2.3", incremental_release=True))
self.assertEqual("1.2.3-rel2", version.get_release_version("foo", last_released_version="1.2.3-rel1", incremental_release=True))
self.assertEqual("0.0.0-rel1", version.get_release_version("foo", last_released_version=None, incremental_release=True))
def test_get_release_version__incremental_release__multiple_digits(self):
self.assertEqual("1.2.3-rel10", version.get_release_version("foo", last_released_version="1.2.3-rel9", incremental_release=True))
self.assertEqual("1.2.3-rel11", version.get_release_version("foo", last_released_version="1.2.3-rel10", incremental_release=True))
self.assertEqual("1.2.3-rel100", version.get_release_version("foo", last_released_version="1.2.3-rel99", incremental_release=True))
def test_get_release_version__incremental_release__last_rel_qualifier_uses_old_dash_number_syntax(self):
# we used to use rel-<num>, for example rel-1, rel-2 etc
# we switched this to rel<num> (so rel1, rel2 etc) so that '-' is only
# used as a separator between version qualifiers: 1.0.0-rel1-SNAPSHOT
self.assertEqual("1.2.3-rel2", version.get_release_version("foo", last_released_version="1.2.3-rel-1", incremental_release=True))
self.assertEqual("1.2.3-rel11", version.get_release_version("foo", last_released_version="1.2.3-rel-10", incremental_release=True))
def test_get_release_version__multiple_qualifiers(self):
self.assertEqual("1.2.3-rel2-foo22", version.get_release_version("foo", last_released_version="1.2.3-rel1-foo22", incremental_release=True))
self.assertEqual("1.2.3-rel10-foo22", version.get_release_version("foo", last_released_version="1.2.3-rel9-foo22", incremental_release=True))
self.assertEqual("1.2.3-rel2-foo22", version.get_release_version("foo", last_released_version="1.2.3-rel-1-foo22", incremental_release=True))
self.assertEqual("1.2.3-rel10-foo22", version.get_release_version("foo", last_released_version="1.2.3-rel-9-foo22", incremental_release=True))
def test_get_next_dev_version__semver_release(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-SNAPSHOT", version.get_next_dev_version("1.0.0", s))
def test_get_next_dev_version__semver_release__snap(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-SNAPSHOT", version.get_next_dev_version("1.0.0-SNAPSHOT", s))
def test_get_next_dev_version__incremental_release(self):
build_pom_content = self._get_build_pom("major")
not_used = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.0.0-SNAPSHOT", version.get_next_dev_version("1.0.0", not_used, incremental_release=True))
def test_get_next_dev_version__incremental_release__snap(self):
build_pom_content = self._get_build_pom("major")
not_used = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.0.0-SNAPSHOT", version.get_next_dev_version("1.0.0-SNAPSHOT", not_used, incremental_release=True))
def test_parse_build_pom_version(self):
build_pom = """
maven_artifact(
group_id = "g1",
artifact_id = "a1",
version = "1.2.3",
)
maven_artifact_update(
version_increment_strategy = "major",
)
"""
self.assertEqual("1.2.3", version.parse_build_pom_version(build_pom))
def test_parse_build_pom_released_version(self):
content = """
released_maven_artifact(
artifact_hash = "123456789",
version = "1.2.3",
)
"""
self.assertEqual("1.2.3", version.parse_build_pom_released_version(content))
def test_get_next_version__major(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0", s("1.0.0"))
def test_get_next_version__major__reset_minor(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0", s("1.2.0"))
def test_get_next_version__major__reset_patch(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0", s("1.0.5"))
def test_get_next_version__major_snap(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-SNAPSHOT", s("1.0.0-SNAPSHOT"))
def test_get_next_version__major_snap__reset_minor(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-SNAPSHOT", s("1.2.0-SNAPSHOT"))
def test_get_next_version__major_snap__reset_patch(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-SNAPSHOT", s("1.2.5-SNAPSHOT"))
def test_get_next_version__major_qual(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-scone_60x", s("1.0.0-scone_60x"))
def test_get_next_version__major_snap_and_qual(self):
build_pom_content = self._get_build_pom("major")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.0.0-scone_60x-SNAPSHOT", s("1.0.0-scone_60x-SNAPSHOT"))
def test_get_next_version__minor(self):
build_pom_content = self._get_build_pom("minor")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.0", s("1.0.0"))
def test_get_next_version__minor__reset_patch(self):
build_pom_content = self._get_build_pom("minor")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.1.0", s("2.0.1"))
def test_get_next_version__minor_snap(self):
build_pom_content = self._get_build_pom("minor")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.0-SNAPSHOT", s("1.0.0-SNAPSHOT"))
def test_get_next_version__minor_qual(self):
build_pom_content = self._get_build_pom("minor")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.0-scone_60x", s("1.0.0-scone_60x"))
def test_get_next_version__minor_snap_and_qual(self):
build_pom_content = self._get_build_pom("minor")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.0-scone_60x-SNAPSHOT", s("1.0.0-scone_60x-SNAPSHOT"))
def test_get_next_version__minor_snap__reset_patch(self):
build_pom_content = self._get_build_pom("minor")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("2.1.0-SNAPSHOT", s("2.0.5-SNAPSHOT"))
def test_get_next_version__patch(self):
build_pom_content = self._get_build_pom("patch")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("5.3.1", s("5.3.0"))
def test_get_next_version__patch_snap(self):
build_pom_content = self._get_build_pom("patch")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.1-SNAPSHOT", s("1.1.0-SNAPSHOT"))
def test_get_next_version__patch_qual(self):
build_pom_content = self._get_build_pom("patch")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.1-scone_70x", s("1.1.0-scone_70x"))
def test_get_next_version__patch_snap_and_qual(self):
build_pom_content = self._get_build_pom("patch")
s = version.get_version_increment_strategy(build_pom_content, None)
self.assertEqual("1.1.1-scone_70x-SNAPSHOT", s("1.1.0-scone_70x-SNAPSHOT"))
def _get_build_pom(self, version_increment_strategy):
build_pom = """
maven_artifact_update(
version_increment_strategy = "%s",
)
"""
return build_pom % version_increment_strategy
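# For example, _get_build_pom("major") renders the snippet:
#
# maven_artifact_update(
#     version_increment_strategy = "major",
# )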
if __name__ == '__main__':
unittest.main()
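# Usage sketch (illustrative only; mirrors the "minor" tests above):
# content = '\nmaven_artifact_update(\n    version_increment_strategy = "minor",\n)\n'
# s = version.get_version_increment_strategy(content, None)
# assert s("1.2.3") == "1.3.0"  # "minor" bumps the minor and resets the patch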
| 49.561497
| 150
| 0.729391
| 1,384
| 9,268
| 4.477601
| 0.090318
| 0.096821
| 0.106503
| 0.107633
| 0.880587
| 0.852348
| 0.821849
| 0.767145
| 0.734872
| 0.689043
| 0
| 0.040713
| 0.146634
| 9,268
| 186
| 151
| 49.827957
| 0.742825
| 0.043267
| 0
| 0.381295
| 0
| 0
| 0.15118
| 0.029807
| 0
| 0
| 0
| 0
| 0.273381
| 1
| 0.215827
| false
| 0
| 0.014388
| 0
| 0.244604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3c56289e56a1d5564a7f99233f3b232711d4732
| 1,096
|
py
|
Python
|
Code coach problems/Hard/Python/Digits_of_Pi.py
|
Djivs/sololearn-code-solutions
|
7727dd97f79863a88841548770481f6f2abdc7bf
|
[
"MIT"
] | 1
|
2020-07-27T07:32:57.000Z
|
2020-07-27T07:32:57.000Z
|
Code coach problems/Hard/Python/Digits_of_Pi.py
|
Djivs/sololearn-code-solutions
|
7727dd97f79863a88841548770481f6f2abdc7bf
|
[
"MIT"
] | null | null | null |
Code coach problems/Hard/Python/Digits_of_Pi.py
|
Djivs/sololearn-code-solutions
|
7727dd97f79863a88841548770481f6f2abdc7bf
|
[
"MIT"
] | 1
|
2020-11-07T12:45:21.000Z
|
2020-11-07T12:45:21.000Z
|
pi = "141592653589793238462643383279502884197169399375105820974944592307816406286208998628034825342117067982148086513282306647093844609550582231725359408128"
pi += "4811174502841027019385211055596446229489549303819644288109756659334461284756482337867831652712019091456485669234603486104543266482133936072602491412737245870066"
pi += "0631558817488152092096282925409171536436789259036001133053054882046652138414695194151160943305727036575959195309218611738193261179310511854807446237996274956735"
pi += "1885752724891227938183011949129833673362440656643086021394946395224737190702179860943702770539217176293176752384674818467669405132000568127145263560827785771342"
pi += "7577896091736371787214684409012249534301465495853710507922796892589235420199561121290219608640344181598136297747713099605187072113499999983729780499510597317328"
pi += "1609631859502445945534690830264252230825334468503526193118817101000313783875288658753320838142061717766914730359825349042875546873115956286388235378759375195778"
pi += "18577805321712268066130019278766111959092164201989"
a = int(input())
print(pi[a-1])
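# Note: the input is a 1-based index into the fractional digits of pi
# (pi[0] == '1' is the first digit after "3.").
# Sanity-check sketch using only the stdlib (valid for the leading digits):
# import math
# assert pi[:6] == str(math.pi)[2:8]  # both are "141592"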
| 109.6
| 169
| 0.941606
| 23
| 1,096
| 44.869565
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.936389
| 0.024635
| 1,096
| 9
| 170
| 121.777778
| 0.028999
| 0
| 0
| 0
| 0
| 0
| 0.914234
| 0.894161
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
612f788c74ed76d7927c70b4e63f968c05b5641e
| 168
|
py
|
Python
|
pysit/solvers/constant_density_acoustic/time/scalar/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 64
|
2015-09-08T06:23:27.000Z
|
2022-03-09T23:35:24.000Z
|
pysit/solvers/constant_density_acoustic/time/scalar/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 23
|
2015-10-08T01:14:24.000Z
|
2021-07-15T11:37:05.000Z
|
pysit/solvers/constant_density_acoustic/time/scalar/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 48
|
2015-06-25T14:48:22.000Z
|
2021-12-06T19:50:25.000Z
|
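# Package initializer: re-export the 1D, 2D and 3D scalar constant-density
# acoustic time-domain solver modules under a single namespace.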
from .constant_density_acoustic_time_scalar_1D import *
from .constant_density_acoustic_time_scalar_2D import *
from .constant_density_acoustic_time_scalar_3D import *
| 42
| 55
| 0.892857
| 24
| 168
| 5.625
| 0.416667
| 0.266667
| 0.422222
| 0.6
| 0.911111
| 0.911111
| 0.637037
| 0
| 0
| 0
| 0
| 0.019231
| 0.071429
| 168
| 3
| 56
| 56
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
61428bc5b7800fcbe65650de9abc61f77cd30892
| 154
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_1/_pkg1_1_1_1_0/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_1/_pkg1_1_1_1_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_1/_pkg1_1_1_1/_pkg1_1_1_1_0/__init__.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from ._mod1_1_1_1_0_0 import *
from ._mod1_1_1_1_0_1 import *
from ._mod1_1_1_1_0_2 import *
from ._mod1_1_1_1_0_3 import *
from ._mod1_1_1_1_0_4 import *
| 30.8
| 30
| 0.811688
| 40
| 154
| 2.375
| 0.2
| 0.210526
| 0.473684
| 0.526316
| 0.884211
| 0.884211
| 0.757895
| 0
| 0
| 0
| 0
| 0.222222
| 0.123377
| 154
| 5
| 31
| 30.8
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
618a5ffc14fc38e9ac444013de578dec143cb801
| 6,269
|
py
|
Python
|
loldib/getratings/models/NA/na_azir/na_azir_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_azir/na_azir_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_azir/na_azir_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
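# Placeholder rating models, one empty Ratings subclass per opposing champion
# for Azir jungle on NA (apparently generated; cf. the module path above).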
class NA_Azir_Jng_Aatrox(Ratings):
pass
class NA_Azir_Jng_Ahri(Ratings):
pass
class NA_Azir_Jng_Akali(Ratings):
pass
class NA_Azir_Jng_Alistar(Ratings):
pass
class NA_Azir_Jng_Amumu(Ratings):
pass
class NA_Azir_Jng_Anivia(Ratings):
pass
class NA_Azir_Jng_Annie(Ratings):
pass
class NA_Azir_Jng_Ashe(Ratings):
pass
class NA_Azir_Jng_AurelionSol(Ratings):
pass
class NA_Azir_Jng_Azir(Ratings):
pass
class NA_Azir_Jng_Bard(Ratings):
pass
class NA_Azir_Jng_Blitzcrank(Ratings):
pass
class NA_Azir_Jng_Brand(Ratings):
pass
class NA_Azir_Jng_Braum(Ratings):
pass
class NA_Azir_Jng_Caitlyn(Ratings):
pass
class NA_Azir_Jng_Camille(Ratings):
pass
class NA_Azir_Jng_Cassiopeia(Ratings):
pass
class NA_Azir_Jng_Chogath(Ratings):
pass
class NA_Azir_Jng_Corki(Ratings):
pass
class NA_Azir_Jng_Darius(Ratings):
pass
class NA_Azir_Jng_Diana(Ratings):
pass
class NA_Azir_Jng_Draven(Ratings):
pass
class NA_Azir_Jng_DrMundo(Ratings):
pass
class NA_Azir_Jng_Ekko(Ratings):
pass
class NA_Azir_Jng_Elise(Ratings):
pass
class NA_Azir_Jng_Evelynn(Ratings):
pass
class NA_Azir_Jng_Ezreal(Ratings):
pass
class NA_Azir_Jng_Fiddlesticks(Ratings):
pass
class NA_Azir_Jng_Fiora(Ratings):
pass
class NA_Azir_Jng_Fizz(Ratings):
pass
class NA_Azir_Jng_Galio(Ratings):
pass
class NA_Azir_Jng_Gangplank(Ratings):
pass
class NA_Azir_Jng_Garen(Ratings):
pass
class NA_Azir_Jng_Gnar(Ratings):
pass
class NA_Azir_Jng_Gragas(Ratings):
pass
class NA_Azir_Jng_Graves(Ratings):
pass
class NA_Azir_Jng_Hecarim(Ratings):
pass
class NA_Azir_Jng_Heimerdinger(Ratings):
pass
class NA_Azir_Jng_Illaoi(Ratings):
pass
class NA_Azir_Jng_Irelia(Ratings):
pass
class NA_Azir_Jng_Ivern(Ratings):
pass
class NA_Azir_Jng_Janna(Ratings):
pass
class NA_Azir_Jng_JarvanIV(Ratings):
pass
class NA_Azir_Jng_Jax(Ratings):
pass
class NA_Azir_Jng_Jayce(Ratings):
pass
class NA_Azir_Jng_Jhin(Ratings):
pass
class NA_Azir_Jng_Jinx(Ratings):
pass
class NA_Azir_Jng_Kalista(Ratings):
pass
class NA_Azir_Jng_Karma(Ratings):
pass
class NA_Azir_Jng_Karthus(Ratings):
pass
class NA_Azir_Jng_Kassadin(Ratings):
pass
class NA_Azir_Jng_Katarina(Ratings):
pass
class NA_Azir_Jng_Kayle(Ratings):
pass
class NA_Azir_Jng_Kayn(Ratings):
pass
class NA_Azir_Jng_Kennen(Ratings):
pass
class NA_Azir_Jng_Khazix(Ratings):
pass
class NA_Azir_Jng_Kindred(Ratings):
pass
class NA_Azir_Jng_Kled(Ratings):
pass
class NA_Azir_Jng_KogMaw(Ratings):
pass
class NA_Azir_Jng_Leblanc(Ratings):
pass
class NA_Azir_Jng_LeeSin(Ratings):
pass
class NA_Azir_Jng_Leona(Ratings):
pass
class NA_Azir_Jng_Lissandra(Ratings):
pass
class NA_Azir_Jng_Lucian(Ratings):
pass
class NA_Azir_Jng_Lulu(Ratings):
pass
class NA_Azir_Jng_Lux(Ratings):
pass
class NA_Azir_Jng_Malphite(Ratings):
pass
class NA_Azir_Jng_Malzahar(Ratings):
pass
class NA_Azir_Jng_Maokai(Ratings):
pass
class NA_Azir_Jng_MasterYi(Ratings):
pass
class NA_Azir_Jng_MissFortune(Ratings):
pass
class NA_Azir_Jng_MonkeyKing(Ratings):
pass
class NA_Azir_Jng_Mordekaiser(Ratings):
pass
class NA_Azir_Jng_Morgana(Ratings):
pass
class NA_Azir_Jng_Nami(Ratings):
pass
class NA_Azir_Jng_Nasus(Ratings):
pass
class NA_Azir_Jng_Nautilus(Ratings):
pass
class NA_Azir_Jng_Nidalee(Ratings):
pass
class NA_Azir_Jng_Nocturne(Ratings):
pass
class NA_Azir_Jng_Nunu(Ratings):
pass
class NA_Azir_Jng_Olaf(Ratings):
pass
class NA_Azir_Jng_Orianna(Ratings):
pass
class NA_Azir_Jng_Ornn(Ratings):
pass
class NA_Azir_Jng_Pantheon(Ratings):
pass
class NA_Azir_Jng_Poppy(Ratings):
pass
class NA_Azir_Jng_Quinn(Ratings):
pass
class NA_Azir_Jng_Rakan(Ratings):
pass
class NA_Azir_Jng_Rammus(Ratings):
pass
class NA_Azir_Jng_RekSai(Ratings):
pass
class NA_Azir_Jng_Renekton(Ratings):
pass
class NA_Azir_Jng_Rengar(Ratings):
pass
class NA_Azir_Jng_Riven(Ratings):
pass
class NA_Azir_Jng_Rumble(Ratings):
pass
class NA_Azir_Jng_Ryze(Ratings):
pass
class NA_Azir_Jng_Sejuani(Ratings):
pass
class NA_Azir_Jng_Shaco(Ratings):
pass
class NA_Azir_Jng_Shen(Ratings):
pass
class NA_Azir_Jng_Shyvana(Ratings):
pass
class NA_Azir_Jng_Singed(Ratings):
pass
class NA_Azir_Jng_Sion(Ratings):
pass
class NA_Azir_Jng_Sivir(Ratings):
pass
class NA_Azir_Jng_Skarner(Ratings):
pass
class NA_Azir_Jng_Sona(Ratings):
pass
class NA_Azir_Jng_Soraka(Ratings):
pass
class NA_Azir_Jng_Swain(Ratings):
pass
class NA_Azir_Jng_Syndra(Ratings):
pass
class NA_Azir_Jng_TahmKench(Ratings):
pass
class NA_Azir_Jng_Taliyah(Ratings):
pass
class NA_Azir_Jng_Talon(Ratings):
pass
class NA_Azir_Jng_Taric(Ratings):
pass
class NA_Azir_Jng_Teemo(Ratings):
pass
class NA_Azir_Jng_Thresh(Ratings):
pass
class NA_Azir_Jng_Tristana(Ratings):
pass
class NA_Azir_Jng_Trundle(Ratings):
pass
class NA_Azir_Jng_Tryndamere(Ratings):
pass
class NA_Azir_Jng_TwistedFate(Ratings):
pass
class NA_Azir_Jng_Twitch(Ratings):
pass
class NA_Azir_Jng_Udyr(Ratings):
pass
class NA_Azir_Jng_Urgot(Ratings):
pass
class NA_Azir_Jng_Varus(Ratings):
pass
class NA_Azir_Jng_Vayne(Ratings):
pass
class NA_Azir_Jng_Veigar(Ratings):
pass
class NA_Azir_Jng_Velkoz(Ratings):
pass
class NA_Azir_Jng_Vi(Ratings):
pass
class NA_Azir_Jng_Viktor(Ratings):
pass
class NA_Azir_Jng_Vladimir(Ratings):
pass
class NA_Azir_Jng_Volibear(Ratings):
pass
class NA_Azir_Jng_Warwick(Ratings):
pass
class NA_Azir_Jng_Xayah(Ratings):
pass
class NA_Azir_Jng_Xerath(Ratings):
pass
class NA_Azir_Jng_XinZhao(Ratings):
pass
class NA_Azir_Jng_Yasuo(Ratings):
pass
class NA_Azir_Jng_Yorick(Ratings):
pass
class NA_Azir_Jng_Zac(Ratings):
pass
class NA_Azir_Jng_Zed(Ratings):
pass
class NA_Azir_Jng_Ziggs(Ratings):
pass
class NA_Azir_Jng_Zilean(Ratings):
pass
class NA_Azir_Jng_Zyra(Ratings):
pass
| 15.033573
| 46
| 0.75642
| 972
| 6,269
| 4.452675
| 0.151235
| 0.223198
| 0.350739
| 0.446396
| 0.791359
| 0.791359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177221
| 6,269
| 416
| 47
| 15.069712
| 0.839085
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
4eef564dcbec2a2712802b4e44dad3b7b72fe0c7
| 4,011
|
py
|
Python
|
jmeter_api/thread_groups/common_thread_group/test_common_thread_group.py
|
antondmtvch/yah2j
|
3ed2e82a2c72b93ad1f11905ff37b79c386b0a58
|
[
"MIT"
] | null | null | null |
jmeter_api/thread_groups/common_thread_group/test_common_thread_group.py
|
antondmtvch/yah2j
|
3ed2e82a2c72b93ad1f11905ff37b79c386b0a58
|
[
"MIT"
] | null | null | null |
jmeter_api/thread_groups/common_thread_group/test_common_thread_group.py
|
antondmtvch/yah2j
|
3ed2e82a2c72b93ad1f11905ff37b79c386b0a58
|
[
"MIT"
] | null | null | null |
import xmltodict
import pytest
from jmeter_api.thread_groups.common_thread_group.elements import CommonThreadGroup
from jmeter_api.basics.utils import tag_wrapper
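# Constructor argument validation: for each keyword, a wrongly typed value
# should raise TypeError, while a correctly typed value should be accepted.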
class TestCommonThreadGroupArgs:
class TestContinueForever:
def test_check(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever="True")
def test_check2(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever="123")
def test_positive(self):
CommonThreadGroup(continue_forever=True)
class TestLoops:
def test_check(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True, loops="1")
def test_check2(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True, loops="a")
def test_positive(self):
CommonThreadGroup(continue_forever=True, loops=23)
class TestIsShedulerEnable:
def test_check(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True,
is_sheduler_enable="True")
def test_check2(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True,
is_sheduler_enable="123")
def test_positive(self):
CommonThreadGroup(continue_forever=True, is_sheduler_enable=True)
class TestShedulerDuration:
def test_check(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True, sheduler_duration="1")
def test_check2(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True, sheduler_duration="a")
def test_positive(self):
CommonThreadGroup(continue_forever=True, sheduler_duration=23)
class TestShedulerDelay:
def test_check(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True, sheduler_delay="1")
def test_check2(self):
with pytest.raises(TypeError):
CommonThreadGroup(continue_forever=True, sheduler_delay="a")
def test_positive(self):
CommonThreadGroup(continue_forever=True, sheduler_delay=23)
class TestCommonThreadGroupRender:
def test_loops(self):
element = CommonThreadGroup(
continue_forever=True, loops=55, sheduler_duration=1000, sheduler_delay=2000)
rendered_doc = element.to_xml()
parsed_doc = xmltodict.parse(
tag_wrapper(rendered_doc, 'test_results'))
assert parsed_doc['test_results']['ThreadGroup']['elementProp']['stringProp']['#text'] == '55'
def test_sheduler_duration(self):
element = CommonThreadGroup(
continue_forever=True, loops=55, sheduler_duration=1000, sheduler_delay=2000)
rendered_doc = element.to_xml()
parsed_doc = xmltodict.parse(
tag_wrapper(rendered_doc, 'test_results'))
assert parsed_doc['test_results']['ThreadGroup']['stringProp'][4]['#text'] == '1000'
def test_sheduler_delay(self):
element = CommonThreadGroup(
continue_forever=True, loops=55, sheduler_duration=1000, sheduler_delay=2000)
rendered_doc = element.to_xml()
parsed_doc = xmltodict.parse(
tag_wrapper(rendered_doc, 'test_results'))
assert parsed_doc['test_results']['ThreadGroup']['stringProp'][5]['#text'] == '2000'
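# Usage sketch (illustrative; names and XML layout taken from the tests above):
# element = CommonThreadGroup(continue_forever=True, loops=10,
#                             sheduler_duration=1000, sheduler_delay=2000)
# doc = xmltodict.parse(tag_wrapper(element.to_xml(), 'test_results'))
# doc['test_results']['ThreadGroup']  # the rendered JMeter ThreadGroup element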
| 37.138889
| 102
| 0.649963
| 401
| 4,011
| 6.27182
| 0.159601
| 0.058449
| 0.267197
| 0.286282
| 0.844135
| 0.844135
| 0.838966
| 0.838966
| 0.832604
| 0.788072
| 0
| 0.022155
| 0.257292
| 4,011
| 107
| 103
| 37.485981
| 0.822088
| 0
| 0
| 0.662651
| 0
| 0
| 0.048118
| 0
| 0
| 0
| 0
| 0
| 0.036145
| 1
| 0.253012
| false
| 0
| 0.048193
| 0
| 0.39759
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9c97959c500a174ae48fa684dca9aefba0bbb226
| 41,103
|
py
|
Python
|
criteo_marketing/api/campaigns_api.py
|
criteo/criteo-python-marketing-sdk
|
1093f86cf035cb6ce657b47f0f5e768c1fc2271c
|
[
"Apache-2.0"
] | 14
|
2018-12-03T14:19:59.000Z
|
2022-02-10T18:11:48.000Z
|
criteo_marketing/api/campaigns_api.py
|
criteo/criteo-python-marketing-sdk
|
1093f86cf035cb6ce657b47f0f5e768c1fc2271c
|
[
"Apache-2.0"
] | 6
|
2019-01-10T18:11:01.000Z
|
2021-05-19T09:15:23.000Z
|
criteo_marketing/api/campaigns_api.py
|
criteo/criteo-python-marketing-sdk
|
1093f86cf035cb6ce657b47f0f5e768c1fc2271c
|
[
"Apache-2.0"
] | 10
|
2019-05-01T05:07:54.000Z
|
2022-02-21T11:16:47.000Z
|
# coding: utf-8
"""
Marketing API v.1.0
IMPORTANT: This swagger links to the Criteo production environment. Any test applied here will thus impact real campaigns. # noqa: E501
The version of the OpenAPI document: v.1.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from criteo_marketing.api_client import ApiClient
from criteo_marketing.exceptions import (
ApiTypeError,
ApiValueError
)
class CampaignsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_bids(self, authorization, **kwargs): # noqa: E501
"""Gets the bids for campaigns and their categories # noqa: E501
Get the campaigns' bids, as well as the bids of their categories # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_bids(authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str authorization: JWT Bearer Token (required)
:param str campaign_ids: Optional. The ids of the campaigns we want to get the bids on. If not specified, advertiserIds will be used.
:param str advertiser_ids: Optional. The ids of the advertisers' campaigns we want to get the bids on. If campaignIds not specified, and neither is advertiserIds, all the advertisers in the user's portfolio are used.
:param str category_hash_codes: Optional. Filters only specified categories. By default no filtering is applied.
:param str bid_type: Optional. Filters by bid type. By default no filtering is applied.
:param str campaign_status: Optional. Filters by campaign status. By default no filtering is applied.
:param bool pending_changes: Optional. Filters only pending changes or settled ones. By default no filtering is applied.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[CampaignBidMessage]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_bids_with_http_info(authorization, **kwargs) # noqa: E501
def get_bids_with_http_info(self, authorization, **kwargs): # noqa: E501
"""Gets the bids for campaigns and their categories # noqa: E501
Get the campaigns' bids, as well as the bids of their categories # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_bids_with_http_info(authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str authorization: JWT Bearer Token (required)
:param str campaign_ids: Optional. The ids of the campaigns we want to get the bids on. If not specified, advertiserIds will be used.
:param str advertiser_ids: Optional. The ids of the advertisers' campaigns we want to get the bids on. If campaignIds not specified, and neither is advertiserIds, all the advertisers in the user's portfolio are used.
:param str category_hash_codes: Optional. Filters only specified categories. By default no filtering is applied.
:param str bid_type: Optional. Filters by bid type. By default no filtering is applied.
:param str campaign_status: Optional. Filters by campaign status. By default no filtering is applied.
:param bool pending_changes: Optional. Filters only pending changes or settled ones. By default no filtering is applied.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[CampaignBidMessage], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['authorization', 'campaign_ids', 'advertiser_ids', 'category_hash_codes', 'bid_type', 'campaign_status', 'pending_changes'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_bids" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'authorization' is set
if ('authorization' not in local_var_params or
local_var_params['authorization'] is None):
raise ApiValueError("Missing the required parameter `authorization` when calling `get_bids`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'campaign_ids' in local_var_params:
query_params.append(('campaignIds', local_var_params['campaign_ids'])) # noqa: E501
if 'advertiser_ids' in local_var_params:
query_params.append(('advertiserIds', local_var_params['advertiser_ids'])) # noqa: E501
if 'category_hash_codes' in local_var_params:
query_params.append(('categoryHashCodes', local_var_params['category_hash_codes'])) # noqa: E501
if 'bid_type' in local_var_params:
query_params.append(('bidType', local_var_params['bid_type'])) # noqa: E501
if 'campaign_status' in local_var_params:
query_params.append(('campaignStatus', local_var_params['campaign_status'])) # noqa: E501
if 'pending_changes' in local_var_params:
query_params.append(('pendingChanges', local_var_params['pending_changes'])) # noqa: E501
header_params = {}
if 'authorization' in local_var_params:
header_params['Authorization'] = local_var_params['authorization'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml', 'text/html']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1/campaigns/bids', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CampaignBidMessage]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaign(self, campaign_id, authorization, **kwargs): # noqa: E501
"""Gets a specific campaign # noqa: E501
Get a specific campaign # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign(campaign_id, authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int campaign_id: Mandatory. The id of the campaign to return. (required)
:param str authorization: JWT Bearer Token (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CampaignMessage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_campaign_with_http_info(campaign_id, authorization, **kwargs) # noqa: E501
def get_campaign_with_http_info(self, campaign_id, authorization, **kwargs): # noqa: E501
"""Gets a specific campaign # noqa: E501
Get a specific campaign # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_with_http_info(campaign_id, authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int campaign_id: Mandatory. The id of the campaign to return. (required)
:param str authorization: JWT Bearer Token (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(CampaignMessage, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['campaign_id', 'authorization'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in local_var_params or
local_var_params['campaign_id'] is None):
raise ApiValueError("Missing the required parameter `campaign_id` when calling `get_campaign`") # noqa: E501
# verify the required parameter 'authorization' is set
if ('authorization' not in local_var_params or
local_var_params['authorization'] is None):
raise ApiValueError("Missing the required parameter `authorization` when calling `get_campaign`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in local_var_params:
path_params['campaignId'] = local_var_params['campaign_id'] # noqa: E501
query_params = []
header_params = {}
if 'authorization' in local_var_params:
header_params['Authorization'] = local_var_params['authorization'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml', 'text/html']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1/campaigns/{campaignId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CampaignMessage', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaigns(self, authorization, **kwargs): # noqa: E501
"""Gets campaigns # noqa: E501
Get the list of campaigns with the specified filters. If a campaign is requested but is missing from the current user's portfolio, it will not be included in the list. If neither campaign ids nor advertiser ids are provided, then the user's portfolio will be used. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaigns(authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str authorization: JWT Bearer Token (required)
:param str campaign_ids: Optional. One or more campaign ids, E.g., 78, 12932, 45236. If the campaign ids requested are not linked to advertisers in the user's portfolio, they will be skipped.
:param str advertiser_ids: Optional. One or more advertiser ids, E.g., 78, 12932, 45236. If the advertiser ids requested are not part of the user's portfolio, they will be skipped.
:param str campaign_status: Optional. Filters by campaign status. By default no filtering is applied.
:param str bid_type: Optional. Filters by campaign bid type. By default, no filtering is applied.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[CampaignMessage]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_campaigns_with_http_info(authorization, **kwargs) # noqa: E501
def get_campaigns_with_http_info(self, authorization, **kwargs): # noqa: E501
"""Gets campaigns # noqa: E501
Get the list of campaigns with the specified filters. If a campaign is requested but is missing from the current user's portfolio, it will not be included in the list. If neither campaign ids nor advertiser ids are provided, then the user's portfolio will be used. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaigns_with_http_info(authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str authorization: JWT Bearer Token (required)
:param str campaign_ids: Optional. One or more campaign ids, E.g., 78, 12932, 45236. If the campaign ids requested are not linked to advertisers in the user's portfolio, they will be skipped.
:param str advertiser_ids: Optional. One or more advertiser ids, E.g., 78, 12932, 45236. If the advertiser ids requested are not part of the user's portfolio, they will be skipped.
:param str campaign_status: Optional. Filters by campaign status. By default no filtering is applied.
:param str bid_type: Optional. Filters by campaign bid type. By default, no filtering is applied.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[CampaignMessage], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['authorization', 'campaign_ids', 'advertiser_ids', 'campaign_status', 'bid_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaigns" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'authorization' is set
if ('authorization' not in local_var_params or
local_var_params['authorization'] is None):
raise ApiValueError("Missing the required parameter `authorization` when calling `get_campaigns`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'campaign_ids' in local_var_params:
query_params.append(('campaignIds', local_var_params['campaign_ids'])) # noqa: E501
if 'advertiser_ids' in local_var_params:
query_params.append(('advertiserIds', local_var_params['advertiser_ids'])) # noqa: E501
if 'campaign_status' in local_var_params:
query_params.append(('campaignStatus', local_var_params['campaign_status'])) # noqa: E501
if 'bid_type' in local_var_params:
query_params.append(('bidType', local_var_params['bid_type'])) # noqa: E501
header_params = {}
if 'authorization' in local_var_params:
header_params['Authorization'] = local_var_params['authorization'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml', 'text/html']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1/campaigns', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CampaignMessage]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_categories(self, campaign_id, authorization, **kwargs): # noqa: E501
"""Gets categories # noqa: E501
Get the list of categories linked to the requested campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_categories(campaign_id, authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int campaign_id: Mandatory. The id of the campaign the categories are linked to. (required)
:param str authorization: JWT Bearer Token (required)
:param bool enabled_only: Optional. Returns only categories you can bid on. Defaults to false.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CategoryMessage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_categories_with_http_info(campaign_id, authorization, **kwargs) # noqa: E501
def get_categories_with_http_info(self, campaign_id, authorization, **kwargs): # noqa: E501
"""Gets categories # noqa: E501
Get the list of categories linked to the requested campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_categories_with_http_info(campaign_id, authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int campaign_id: Mandatory. The id of the campaign the categories are linked to. (required)
:param str authorization: JWT Bearer Token (required)
:param bool enabled_only: Optional. Returns only categories you can bid on. Defaults to false.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(CategoryMessage, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['campaign_id', 'authorization', 'enabled_only'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_categories" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in local_var_params or
local_var_params['campaign_id'] is None):
raise ApiValueError("Missing the required parameter `campaign_id` when calling `get_categories`") # noqa: E501
# verify the required parameter 'authorization' is set
if ('authorization' not in local_var_params or
local_var_params['authorization'] is None):
raise ApiValueError("Missing the required parameter `authorization` when calling `get_categories`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in local_var_params:
path_params['campaignId'] = local_var_params['campaign_id'] # noqa: E501
query_params = []
if 'enabled_only' in local_var_params:
query_params.append(('enabledOnly', local_var_params['enabled_only'])) # noqa: E501
header_params = {}
if 'authorization' in local_var_params:
header_params['Authorization'] = local_var_params['authorization'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml', 'text/html']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1/campaigns/{campaignId}/categories', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CategoryMessage', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_category(self, campaign_id, category_hash_code, authorization, **kwargs): # noqa: E501
"""Gets a specific category # noqa: E501
Get a specific category linked to the requested campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_category(campaign_id, category_hash_code, authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int campaign_id: Mandatory. The id of the campaign the categories are linked to. (required)
:param int category_hash_code: Mandatory. The id of the category to return. (required)
:param str authorization: JWT Bearer Token (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CategoryMessage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_category_with_http_info(campaign_id, category_hash_code, authorization, **kwargs) # noqa: E501
def get_category_with_http_info(self, campaign_id, category_hash_code, authorization, **kwargs): # noqa: E501
"""Gets a specific category # noqa: E501
Get a specific category linked to the requested campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_category_with_http_info(campaign_id, category_hash_code, authorization, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int campaign_id: Mandatory. The id of the campaign the categories are linked to. (required)
:param int category_hash_code: Mandatory. The id of the category to return. (required)
:param str authorization: JWT Bearer Token (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(CategoryMessage, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['campaign_id', 'category_hash_code', 'authorization'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_category" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in local_var_params or
local_var_params['campaign_id'] is None):
raise ApiValueError("Missing the required parameter `campaign_id` when calling `get_category`") # noqa: E501
# verify the required parameter 'category_hash_code' is set
if ('category_hash_code' not in local_var_params or
local_var_params['category_hash_code'] is None):
raise ApiValueError("Missing the required parameter `category_hash_code` when calling `get_category`") # noqa: E501
# verify the required parameter 'authorization' is set
if ('authorization' not in local_var_params or
local_var_params['authorization'] is None):
raise ApiValueError("Missing the required parameter `authorization` when calling `get_category`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in local_var_params:
path_params['campaignId'] = local_var_params['campaign_id'] # noqa: E501
if 'category_hash_code' in local_var_params:
path_params['categoryHashCode'] = local_var_params['category_hash_code'] # noqa: E501
query_params = []
header_params = {}
if 'authorization' in local_var_params:
header_params['Authorization'] = local_var_params['authorization'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml', 'text/html']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1/campaigns/{campaignId}/categories/{categoryHashCode}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CategoryMessage', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_bids(self, authorization, bid_changes, **kwargs): # noqa: E501
"""Update bids for campaigns and their categories # noqa: E501
If a campaign bid is updated, all (if any) category bids for this campaign will be updated with the new value if they are initially equal to the campaign bid. If the campaign bid should not cascade to categories with the same bid value, explicit bid changes must be added to the request for the categories whose value should be kept (set to the initial value). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_bids(authorization, bid_changes, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str authorization: JWT Bearer Token (required)
:param list[CampaignBidChangeRequest] bid_changes: Specifies the list of bid changes to be applied. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[CampaignMessage]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_bids_with_http_info(authorization, bid_changes, **kwargs) # noqa: E501
def update_bids_with_http_info(self, authorization, bid_changes, **kwargs): # noqa: E501
"""Update bids for campaigns and their categories # noqa: E501
If a campaign bid is updated, all (if any) category bids for this campaign will be updated with the new value if they are initially equal to the campaign bid. If the campaign bid should not cascade to categories with the same bid value, explicit bid changes must be added to the request for the categories whose value should be kept (set to the initial value). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_bids_with_http_info(authorization, bid_changes, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str authorization: JWT Bearer Token (required)
:param list[CampaignBidChangeRequest] bid_changes: Specifies the list of bid changes to be applied. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[CampaignMessage], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['authorization', 'bid_changes'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_bids" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'authorization' is set
if ('authorization' not in local_var_params or
local_var_params['authorization'] is None):
raise ApiValueError("Missing the required parameter `authorization` when calling `update_bids`") # noqa: E501
# verify the required parameter 'bid_changes' is set
if ('bid_changes' not in local_var_params or
local_var_params['bid_changes'] is None):
raise ApiValueError("Missing the required parameter `bid_changes` when calling `update_bids`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'authorization' in local_var_params:
header_params['Authorization'] = local_var_params['authorization'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'bid_changes' in local_var_params:
body_params = local_var_params['bid_changes']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml', 'text/html']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/json', 'application/xml', 'text/xml', 'application/x-www-form-urlencoded', 'text/html']) # noqa: E501
# Authentication setting
auth_settings = ['Authorization'] # noqa: E501
return self.api_client.call_api(
'/v1/campaigns/bids', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CampaignMessage]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
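# Usage sketch (illustrative; only names defined in this module are assumed):
# client = ApiClient()
# api = CampaignsApi(client)
# campaigns = api.get_campaigns("Bearer <JWT>")               # synchronous
# thread = api.get_campaigns("Bearer <JWT>", async_req=True)  # asynchronous
# campaigns = thread.get()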
| 53.104651
| 394
| 0.637739
| 4,846
| 41,103
| 5.198514
| 0.056335
| 0.040013
| 0.063353
| 0.020959
| 0.954271
| 0.946253
| 0.934741
| 0.930811
| 0.922039
| 0.912472
| 0
| 0.013564
| 0.287935
| 41,103
| 773
| 395
| 53.173351
| 0.847171
| 0.49101
| 0
| 0.76
| 0
| 0
| 0.235405
| 0.032977
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037143
| false
| 0
| 0.014286
| 0
| 0.088571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9cc644272949bbaf742c6a9eddffc5b963764f68
| 18,537
|
py
|
Python
|
static_methods/external_tools.py
|
david-house-harvard/canvas_python_sdk
|
9c2e59621a9a5667bc43c253ef801482b241a2c1
|
[
"MIT"
] | 21
|
2015-06-12T13:49:04.000Z
|
2021-11-08T05:37:44.000Z
|
static_methods/external_tools.py
|
david-house-harvard/canvas_python_sdk
|
9c2e59621a9a5667bc43c253ef801482b241a2c1
|
[
"MIT"
] | 44
|
2015-02-04T15:26:52.000Z
|
2021-12-03T17:47:00.000Z
|
static_methods/external_tools.py
|
david-house-harvard/canvas_python_sdk
|
9c2e59621a9a5667bc43c253ef801482b241a2c1
|
[
"MIT"
] | 7
|
2015-07-20T23:56:03.000Z
|
2021-02-23T17:13:00.000Z
|
from canvas_sdk import client, utils
def edit_external_tool_courses(request_ctx, course_id, external_tool_id, name=None, privacy_level=None, consumer_key=None, shared_secret=None, description=None, url=None, domain=None, icon_url=None, text=None, custom_fields=None, account_navigation_url=None, account_navigation_enabled=None, account_navigation_text=None, user_navigation_url=None, user_navigation_enabled=None, user_navigation_text=None, course_navigation_url=None, course_navigation_enabled=None, course_navigation_text=None, course_navigation_visibility=None, course_navigation_default=None, editor_button_url=None, editor_button_enabled=None, editor_button_icon_url=None, editor_button_selection_width=None, editor_button_selection_height=None, resource_selection_url=None, resource_selection_enabled=None, resource_selection_icon_url=None, resource_selection_selection_width=None, resource_selection_selection_height=None, config_type=None, config_xml=None, config_url=None, **request_kwargs):
"""
Update the specified external tool. Uses the same parameters as create.
:param request_ctx: The request context
:type request_ctx: :class:RequestContext
:param course_id: (required) ID
:type course_id: string
:param external_tool_id: (required) ID
:type external_tool_id: string
:param name: (optional) The name of the tool
:type name: string or None
:param privacy_level: (optional) What information to send to the external tool.
:type privacy_level: string or None
:param consumer_key: (optional) The consumer key for the external tool
:type consumer_key: string or None
:param shared_secret: (optional) The shared secret with the external tool
:type shared_secret: string or None
:param description: (optional) A description of the tool
:type description: string or None
:param url: (optional) The url to match links against. Either "url" or "domain" should be set, not both.
:type url: string or None
:param domain: (optional) The domain to match links against. Either "url" or "domain" should be set, not both.
:type domain: string or None
:param icon_url: (optional) The url of the icon to show for this tool
:type icon_url: string or None
:param text: (optional) The default text to show for this tool
:type text: string or None
:param custom_fields: (optional) Custom fields that will be sent to the tool consumer, specified as custom_fields[field_name]
:type custom_fields: string or None
:param account_navigation_url: (optional) The url of the external tool for account navigation
:type account_navigation_url: string or None
:param account_navigation_enabled: (optional) Set this to enable this feature
:type account_navigation_enabled: boolean or None
:param account_navigation_text: (optional) The text that will show on the left-tab in the account navigation
:type account_navigation_text: string or None
:param user_navigation_url: (optional) The url of the external tool for user navigation
:type user_navigation_url: string or None
:param user_navigation_enabled: (optional) Set this to enable this feature
:type user_navigation_enabled: boolean or None
:param user_navigation_text: (optional) The text that will show on the left-tab in the user navigation
:type user_navigation_text: string or None
:param course_navigation_url: (optional) The url of the external tool for course navigation
:type course_navigation_url: string or None
:param course_navigation_enabled: (optional) Set this to enable this feature
:type course_navigation_enabled: boolean or None
:param course_navigation_text: (optional) The text that will show on the left-tab in the course navigation
:type course_navigation_text: string or None
:param course_navigation_visibility: (optional) Who will see the navigation tab. "admins" for course admins, "members" for students, null for everyone
:type course_navigation_visibility: string or None
:param course_navigation_default: (optional) Whether the navigation option will show in the course by default or whether the teacher will have to explicitly enable it
:type course_navigation_default: boolean or None
:param editor_button_url: (optional) The url of the external tool
:type editor_button_url: string or None
:param editor_button_enabled: (optional) Set this to enable this feature
:type editor_button_enabled: boolean or None
:param editor_button_icon_url: (optional) The url of the icon to show in the WYSIWYG editor
:type editor_button_icon_url: string or None
:param editor_button_selection_width: (optional) The width of the dialog the tool is launched in
:type editor_button_selection_width: string or None
:param editor_button_selection_height: (optional) The height of the dialog the tool is launched in
:type editor_button_selection_height: string or None
:param resource_selection_url: (optional) The url of the external tool
:type resource_selection_url: string or None
:param resource_selection_enabled: (optional) Set this to enable this feature
:type resource_selection_enabled: boolean or None
:param resource_selection_icon_url: (optional) The url of the icon to show in the module external tool list
:type resource_selection_icon_url: string or None
:param resource_selection_selection_width: (optional) The width of the dialog the tool is launched in
:type resource_selection_selection_width: string or None
:param resource_selection_selection_height: (optional) The height of the dialog the tool is launched in
:type resource_selection_selection_height: string or None
:param config_type: (optional) Configuration can be passed in as CC xml instead of using query parameters. If this value is "by_url" or "by_xml" then an xml configuration will be expected in either the "config_xml" or "config_url" parameter. Note that the name parameter overrides the tool name provided in the xml
:type config_type: string or None
:param config_xml: (optional) XML tool configuration, as specified in the CC xml specification. This is required if "config_type" is set to "by_xml"
:type config_xml: string or None
:param config_url: (optional) URL where the server can retrieve an XML tool configuration, as specified in the CC xml specification. This is required if "config_type" is set to "by_url"
:type config_url: string or None
:return: Edit an external tool
:rtype: requests.Response (with void data)
"""
privacy_level_types = ('anonymous', 'name_only', 'public')
course_navigation_visibility_types = ('admins', 'members')
utils.validate_attr_is_acceptable(privacy_level, privacy_level_types)
utils.validate_attr_is_acceptable(course_navigation_visibility, course_navigation_visibility_types)
path = '/v1/courses/{course_id}/external_tools/{external_tool_id}'
payload = {
'name': name,
'privacy_level': privacy_level,
'consumer_key': consumer_key,
'shared_secret': shared_secret,
'description': description,
'url': url,
'domain': domain,
'icon_url': icon_url,
'text': text,
'custom_fields': custom_fields,
'account_navigation[url]': account_navigation_url,
'account_navigation[enabled]': account_navigation_enabled,
'account_navigation[text]': account_navigation_text,
'user_navigation[url]': user_navigation_url,
'user_navigation[enabled]': user_navigation_enabled,
'user_navigation[text]': user_navigation_text,
'course_navigation[url]': course_navigation_url,
'course_navigation[enabled]': course_navigation_enabled,
'course_navigation[text]': course_navigation_text,
'course_navigation[visibility]': course_navigation_visibility,
'course_navigation[default]': course_navigation_default,
'editor_button[url]': editor_button_url,
'editor_button[enabled]': editor_button_enabled,
'editor_button[icon_url]': editor_button_icon_url,
'editor_button[selection_width]': editor_button_selection_width,
'editor_button[selection_height]': editor_button_selection_height,
'resource_selection[url]': resource_selection_url,
'resource_selection[enabled]': resource_selection_enabled,
'resource_selection[icon_url]': resource_selection_icon_url,
'resource_selection[selection_width]': resource_selection_selection_width,
'resource_selection[selection_height]': resource_selection_selection_height,
'config_type': config_type,
'config_xml': config_xml,
'config_url': config_url,
}
url = request_ctx.base_api_url + path.format(course_id=course_id, external_tool_id=external_tool_id)
response = client.put(request_ctx, url, payload=payload, **request_kwargs)
return response
def edit_external_tool_accounts(request_ctx, account_id, external_tool_id, name=None, privacy_level=None, consumer_key=None, shared_secret=None, description=None, url=None, domain=None, icon_url=None, text=None, custom_fields=None, account_navigation_url=None, account_navigation_enabled=None, account_navigation_text=None, user_navigation_url=None, user_navigation_enabled=None, user_navigation_text=None, course_navigation_url=None, course_navigation_enabled=None, course_navigation_text=None, course_navigation_visibility=None, course_navigation_default=None, editor_button_url=None, editor_button_enabled=None, editor_button_icon_url=None, editor_button_selection_width=None, editor_button_selection_height=None, resource_selection_url=None, resource_selection_enabled=None, resource_selection_icon_url=None, resource_selection_selection_width=None, resource_selection_selection_height=None, config_type=None, config_xml=None, config_url=None, **request_kwargs):
"""
Update the specified external tool. Uses the same parameters as create.
:param request_ctx: The request context
:type request_ctx: :class:`RequestContext`
:param account_id: (required) ID
:type account_id: string
:param external_tool_id: (required) ID
:type external_tool_id: string
:param name: (optional) The name of the tool
:type name: string or None
:param privacy_level: (optional) What information to send to the external tool.
:type privacy_level: string or None
:param consumer_key: (optional) The consumer key for the external tool
:type consumer_key: string or None
:param shared_secret: (optional) The shared secret with the external tool
:type shared_secret: string or None
:param description: (optional) A description of the tool
:type description: string or None
:param url: (optional) The url to match links against. Either "url" or "domain" should be set, not both.
:type url: string or None
:param domain: (optional) The domain to match links against. Either "url" or "domain" should be set, not both.
:type domain: string or None
:param icon_url: (optional) The url of the icon to show for this tool
:type icon_url: string or None
:param text: (optional) The default text to show for this tool
:type text: string or None
:param custom_fields: (optional) Custom fields that will be sent to the tool consumer, specified as custom_fields[field_name]
:type custom_fields: string or None
:param account_navigation_url: (optional) The url of the external tool for account navigation
:type account_navigation_url: string or None
:param account_navigation_enabled: (optional) Set this to enable this feature
:type account_navigation_enabled: boolean or None
:param account_navigation_text: (optional) The text that will show on the left-tab in the account navigation
:type account_navigation_text: string or None
:param user_navigation_url: (optional) The url of the external tool for user navigation
:type user_navigation_url: string or None
:param user_navigation_enabled: (optional) Set this to enable this feature
:type user_navigation_enabled: boolean or None
:param user_navigation_text: (optional) The text that will show on the left-tab in the user navigation
:type user_navigation_text: string or None
:param course_navigation_url: (optional) The url of the external tool for course navigation
:type course_navigation_url: string or None
:param course_navigation_enabled: (optional) Set this to enable this feature
:type course_navigation_enabled: boolean or None
:param course_navigation_text: (optional) The text that will show on the left-tab in the course navigation
:type course_navigation_text: string or None
:param course_navigation_visibility: (optional) Who will see the navigation tab. "admins" for course admins, "members" for students, null for everyone
:type course_navigation_visibility: string or None
:param course_navigation_default: (optional) Whether the navigation option will show in the course by default or whether the teacher will have to explicitly enable it
:type course_navigation_default: boolean or None
:param editor_button_url: (optional) The url of the external tool
:type editor_button_url: string or None
:param editor_button_enabled: (optional) Set this to enable this feature
:type editor_button_enabled: boolean or None
:param editor_button_icon_url: (optional) The url of the icon to show in the WYSIWYG editor
:type editor_button_icon_url: string or None
:param editor_button_selection_width: (optional) The width of the dialog the tool is launched in
:type editor_button_selection_width: string or None
:param editor_button_selection_height: (optional) The height of the dialog the tool is launched in
:type editor_button_selection_height: string or None
:param resource_selection_url: (optional) The url of the external tool
:type resource_selection_url: string or None
:param resource_selection_enabled: (optional) Set this to enable this feature
:type resource_selection_enabled: boolean or None
:param resource_selection_icon_url: (optional) The url of the icon to show in the module external tool list
:type resource_selection_icon_url: string or None
:param resource_selection_selection_width: (optional) The width of the dialog the tool is launched in
:type resource_selection_selection_width: string or None
:param resource_selection_selection_height: (optional) The height of the dialog the tool is launched in
:type resource_selection_selection_height: string or None
:param config_type: (optional) Configuration can be passed in as CC xml instead of using query parameters. If this value is "by_url" or "by_xml" then an xml configuration will be expected in either the "config_xml" or "config_url" parameter. Note that the name parameter overrides the tool name provided in the xml
:type config_type: string or None
:param config_xml: (optional) XML tool configuration, as specified in the CC xml specification. This is required if "config_type" is set to "by_xml"
:type config_xml: string or None
:param config_url: (optional) URL where the server can retrieve an XML tool configuration, as specified in the CC xml specification. This is required if "config_type" is set to "by_url"
:type config_url: string or None
:return: Edit an external tool
:rtype: requests.Response (with void data)
"""
privacy_level_types = ('anonymous', 'name_only', 'public')
course_navigation_visibility_types = ('admins', 'members')
utils.validate_attr_is_acceptable(privacy_level, privacy_level_types)
utils.validate_attr_is_acceptable(course_navigation_visibility, course_navigation_visibility_types)
path = '/v1/accounts/{account_id}/external_tools/{external_tool_id}'
payload = {
'name': name,
'privacy_level': privacy_level,
'consumer_key': consumer_key,
'shared_secret': shared_secret,
'description': description,
'url': url,
'domain': domain,
'icon_url': icon_url,
'text': text,
'custom_fields': custom_fields,
'account_navigation[url]': account_navigation_url,
'account_navigation[enabled]': account_navigation_enabled,
'account_navigation[text]': account_navigation_text,
'user_navigation[url]': user_navigation_url,
'user_navigation[enabled]': user_navigation_enabled,
'user_navigation[text]': user_navigation_text,
'course_navigation[url]': course_navigation_url,
'course_navigation[enabled]': course_navigation_enabled,
'course_navigation[text]': course_navigation_text,
'course_navigation[visibility]': course_navigation_visibility,
'course_navigation[default]': course_navigation_default,
'editor_button[url]': editor_button_url,
'editor_button[enabled]': editor_button_enabled,
'editor_button[icon_url]': editor_button_icon_url,
'editor_button[selection_width]': editor_button_selection_width,
'editor_button[selection_height]': editor_button_selection_height,
'resource_selection[url]': resource_selection_url,
'resource_selection[enabled]': resource_selection_enabled,
'resource_selection[icon_url]': resource_selection_icon_url,
'resource_selection[selection_width]': resource_selection_selection_width,
'resource_selection[selection_height]': resource_selection_selection_height,
'config_type': config_type,
'config_xml': config_xml,
'config_url': config_url,
}
url = request_ctx.base_api_url + path.format(account_id=account_id, external_tool_id=external_tool_id)
response = client.put(request_ctx, url, payload=payload, **request_kwargs)
return response
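# Usage sketch (hypothetical values; the RequestContext constructor shown here
# is an assumption, consult the canvas_python_sdk docs for the real signature):
#
# from canvas_sdk.client import RequestContext
# ctx = RequestContext('my-oauth2-token', 'https://canvas.example.edu/api')
# resp = edit_external_tool_courses(ctx, course_id='123', external_tool_id='456',
#                                   name='My LTI Tool', privacy_level='public')
# resp.raise_for_status()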
| 71.296154
| 966
| 0.73944
| 2,476
| 18,537
| 5.27706
| 0.062197
| 0.031226
| 0.055564
| 0.070259
| 0.984234
| 0.982244
| 0.982244
| 0.982244
| 0.982244
| 0.982244
| 0
| 0.000134
| 0.195663
| 18,537
| 259
| 967
| 71.571429
| 0.87619
| 0.599827
| 0
| 0.901099
| 0
| 0
| 0.221632
| 0.164675
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021978
| false
| 0
| 0.010989
| 0
| 0.054945
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9cff109d318f446838480d5775f4c0da25a435f7
| 25,756
|
py
|
Python
|
datasets/kitti_comb_multiframe.py
|
pgodet/star_flow
|
cedb96ff339d11abf71d12d09e794593a742ccce
|
[
"Apache-2.0"
] | 10
|
2020-11-17T12:55:00.000Z
|
2022-01-13T07:23:55.000Z
|
datasets/kitti_comb_multiframe.py
|
pgodet/star_flow
|
cedb96ff339d11abf71d12d09e794593a742ccce
|
[
"Apache-2.0"
] | 1
|
2021-01-02T22:46:07.000Z
|
2021-01-02T22:46:07.000Z
|
datasets/kitti_comb_multiframe.py
|
pgodet/star_flow
|
cedb96ff339d11abf71d12d09e794593a742ccce
|
[
"Apache-2.0"
] | 1
|
2021-01-26T10:53:02.000Z
|
2021-01-26T10:53:02.000Z
|
from __future__ import absolute_import, division, print_function
import os
import torch.utils.data as data
from glob import glob
from torchvision import transforms as vision_transforms
from . import transforms
from . import common
import numpy as np
import png
VALIDATE_INDICES_2015 = [10, 11, 12, 25, 26, 30, 31, 40, 41, 42, 46, 52, 53, 72, 73, 74, 75, 76, 80, 81, 85, 86, 95, 96, 97, 98, 104, 116, 117, 120, 121, 126, 127, 153, 172, 175, 183, 184, 190, 199]
VALIDATE_INDICES_2012 = [0, 12, 15, 16, 17, 18, 24, 30, 38, 39, 42, 50, 54, 59, 60, 61, 77, 78, 81, 89, 97, 101, 107, 121, 124, 142, 145, 146, 152, 154, 155, 158, 159, 160, 164, 182, 183, 184, 190]
def read_png_flow(flow_file):
flow_object = png.Reader(filename=flow_file)
flow_direct = flow_object.asDirect()
flow_data = list(flow_direct[2])
(w, h) = flow_direct[3]['size']
flow = np.zeros((h, w, 3), dtype=np.float64)
for i in range(len(flow_data)):
flow[i, :, 0] = flow_data[i][0::3]
flow[i, :, 1] = flow_data[i][1::3]
flow[i, :, 2] = flow_data[i][2::3]
invalid_idx = (flow[:, :, 2] == 0)
flow[:, :, 0:2] = (flow[:, :, 0:2] - 2 ** 15) / 64.0
flow[invalid_idx, 0] = 0
flow[invalid_idx, 1] = 0
return flow[:, :, 0:2], (1 - invalid_idx * 1)[:, :, None]
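# Decoding note: KITTI stores ground-truth flow as 3-channel 16-bit PNGs
# (u, v, valid), encoded as stored = flow * 64.0 + 2 ** 15. The arithmetic
# above inverts that encoding; pixels whose third channel is 0 are treated
# as invalid and zeroed, and the second return value is their validity mask.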
def kitti_random_crop(im1, im2, flo_f, valid_mask, crop_height=370, crop_width=1224):
height, width, _ = im1.shape
# get starting positions
x = np.random.uniform(0, width - crop_width + 1)
y = np.random.uniform(0, height - crop_height + 1)
str_x = int(x)
str_y = int(y)
end_x = int(x + crop_width)
end_y = int(y + crop_height)
im1 = im1[str_y:end_y, str_x:end_x, :]
im2 = im2[str_y:end_y, str_x:end_x, :]
flo_f = flo_f[str_y:end_y, str_x:end_x, :]
valid_mask = valid_mask[str_y:end_y, str_x:end_x, :]
return im1, im2, flo_f, valid_mask
def kitti_random_crop_multi(imgs, flo_f, valid_mask, crop_height=370, crop_width=1224):
height, width, _ = imgs[0].shape
# get starting positions
x = np.random.uniform(0, width - crop_width + 1)
y = np.random.uniform(0, height - crop_height + 1)
str_x = int(x)
str_y = int(y)
end_x = int(x + crop_width)
end_y = int(y + crop_height)
for k in range(len(imgs)):
imgs[k] = imgs[k][str_y:end_y, str_x:end_x, :]
flo_f = flo_f[str_y:end_y, str_x:end_x, :]
valid_mask = valid_mask[str_y:end_y, str_x:end_x, :]
return imgs, flo_f, valid_mask
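# Note: the same crop window is applied to every frame, the flow and the
# validity mask, so the spatial correspondence between inputs and targets
# is preserved across the whole sample.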
class KittiMultiframe_comb_test(data.Dataset):
def __init__(self,
args,
images_root_2015=None,
images_root_2012=None,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=True):
self._args = args
self.preprocessing_crop = preprocessing_crop
self._nframes = nframes
list_of_indices_2012 = []
list_of_indices_2015 = []
# ----------------------------------------------------------
# KITTI 2015
# ----------------------------------------------------------
if images_root_2015 is not None:
if not os.path.isdir(images_root_2015):
raise ValueError("Image directory '%s' not found!")
all_img_n_2015_filenames = [] # [time][sample]
for n in range(self._nframes):
time_index = 12 - self._nframes + n
all_img_n_2015_filenames.append(sorted(glob(os.path.join(images_root_2015, "*_{:02d}.png".format(time_index)))))
assert len(all_img_n_2015_filenames[0]) != 0
for n in range(self._nframes):
assert len(all_img_n_2015_filenames[0]) == len(all_img_n_2015_filenames[n])
list_of_indices_2015 = range(len(all_img_n_2015_filenames[0]))
# ----------------------------------------------------------
# KITTI 2012
# ----------------------------------------------------------
if images_root_2012 is not None:
if not os.path.isdir(images_root_2012):
raise ValueError("Image directory '%s' not found!")
all_img_n_2012_filenames = [] # [time][sample]
for n in range(self._nframes):
time_index = 12 - self._nframes + n
all_img_n_2012_filenames.append(sorted(glob(os.path.join(images_root_2012, "*_{:02d}.png".format(time_index)))))
assert len(all_img_n_2012_filenames[0]) != 0
for n in range(self._nframes):
assert len(all_img_n_2012_filenames[0]) == len(all_img_n_2012_filenames[n])
list_of_indices_2012 = range(len(all_img_n_2012_filenames[0]))
# ----------------------------------------------------------
# Save list of actual filenames for inputs and flows
# ----------------------------------------------------------
self._image_list = []
self._flow_list = []
for ii in list_of_indices_2015:
imgs = []
for n in range(self._nframes):
imgs.append(all_img_n_2015_filenames[n][ii])
idx_0 = os.path.splitext(os.path.basename(imgs[0]))[0][:-3]
for n in range(self._nframes):
idx_n = os.path.splitext(os.path.basename(imgs[n]))[0][:-3]
assert idx_0 == idx_n
if np.any([not os.path.isfile(im) for im in imgs]):
continue
self._image_list += [imgs]
for ii in list_of_indices_2012:
imgs = []
for n in range(self._nframes):
imgs.append(all_img_n_2012_filenames[n][ii])
idx_0 = os.path.splitext(os.path.basename(imgs[0]))[0][:-3]
for n in range(self._nframes):
idx_n = os.path.splitext(os.path.basename(imgs[n]))[0][:-3]
assert idx_0 == idx_n
if np.any([not os.path.isfile(im) for im in imgs]):
continue
self._image_list += [imgs]
self._size = len(self._image_list)
assert len(self._image_list) != 0
# ----------------------------------------------------------
# photometric_augmentations
# ----------------------------------------------------------
if photometric_augmentations:
self._photometric_transform = transforms.ConcatTransformSplitChainer([
# uint8 -> PIL
vision_transforms.ToPILImage(),
# PIL -> PIL : random hsv and contrast
vision_transforms.ColorJitter(brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5),
# PIL -> FloatTensor
vision_transforms.transforms.ToTensor(),
transforms.RandomGamma(min_gamma=0.7, max_gamma=1.5, clip_image=True),
], from_numpy=True, to_numpy=False)
else:
self._photometric_transform = transforms.ConcatTransformSplitChainer([
# uint8 -> FloatTensor
vision_transforms.transforms.ToTensor(),
], from_numpy=True, to_numpy=False)
def __getitem__(self, index):
index = index % self._size
imgs_filenames = self._image_list[index]
# read images as uint8 numpy arrays
imgs_np0 = [common.read_image_as_byte(filename) for filename in imgs_filenames]
# possibly apply photometric transformations
imgs = self._photometric_transform(*imgs_np0)
# example filename
basenames = [os.path.basename(f)[:9] for f in imgs_filenames]
example_dict = {
"input1": imgs[0],
"input_images": imgs,
"index": index,
"basename": basenames,
"nframes":self._nframes
}
return example_dict
def __len__(self):
return self._size
class KittiMultiframe_comb(data.Dataset):
def __init__(self,
args,
images_root_2015=None,
flow_root_2015=None,
images_root_2012=None,
flow_root_2012=None,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=True,
dstype="full"):
self._args = args
self.preprocessing_crop = preprocessing_crop
self._nframes = nframes
list_of_indices_2012 = []
list_of_indices_2015 = []
# ----------------------------------------------------------
# KITTI 2015
# ----------------------------------------------------------
if images_root_2015 is not None and flow_root_2015 is not None:
if not os.path.isdir(images_root_2015):
raise ValueError("Image directory '%s' not found!", images_root_2015)
if not os.path.isdir(flow_root_2015):
raise ValueError("Flow directory '%s' not found!", flow_root_2015)
all_img_n_2015_filenames = [] # [time][sample]
for n in range(self._nframes):
time_index = 12 - self._nframes + n
all_img_n_2015_filenames.append(sorted(glob(os.path.join(images_root_2015, "*_{:02d}.png".format(time_index)))))
flow_f_2015_filenames = sorted(glob(os.path.join(flow_root_2015, "*_10.png")))
assert len(all_img_n_2015_filenames[0]) != 0
for n in range(self._nframes):
assert len(all_img_n_2015_filenames[0]) == len(all_img_n_2015_filenames[n])
assert len(flow_f_2015_filenames) == len(all_img_n_2015_filenames[0])
num_flows_2015 = len(flow_f_2015_filenames)
validate_indices_2015 = [x for x in VALIDATE_INDICES_2015 if x in range(num_flows_2015)]
if dstype == "train":
list_of_indices_2015 = [x for x in range(num_flows_2015) if x not in validate_indices_2015]
elif dstype == "valid":
list_of_indices_2015 = validate_indices_2015
elif dstype == "full":
list_of_indices_2015 = range(len(all_img_n_2015_filenames[0]))
else:
raise ValueError("KITTI 2015: dstype '%s' unknown!", dstype)
# ----------------------------------------------------------
# KITTI 2012
# ----------------------------------------------------------
if images_root_2012 is not None and flow_root_2012 is not None:
if not os.path.isdir(images_root_2012):
raise ValueError("Image directory '%s' not found!", images_root_2012)
if not os.path.isdir(flow_root_2012):
raise ValueError("Flow directory '%s' not found!", flow_root_2012)
all_img_n_2012_filenames = [] # [time][sample]
for n in range(self._nframes):
time_index = 12 - self._nframes + n
all_img_n_2012_filenames.append(sorted(glob(os.path.join(images_root_2012, "*_{:02d}.png".format(time_index)))))
flow_f_2012_filenames = sorted(glob(os.path.join(flow_root_2012, "*_10.png")))
assert len(all_img_n_2012_filenames[0]) != 0
for n in range(self._nframes):
assert len(all_img_n_2012_filenames[0]) == len(all_img_n_2012_filenames[n])
assert len(flow_f_2012_filenames) == len(all_img_n_2012_filenames[0])
num_flows_2012 = len(flow_f_2012_filenames)
validate_indices_2012 = [x for x in VALIDATE_INDICES_2012 if x in range(num_flows_2012)]
if dstype == "train":
list_of_indices_2012 = [x for x in range(num_flows_2012) if x not in validate_indices_2012]
elif dstype == "valid":
list_of_indices_2012 = validate_indices_2012
elif dstype == "full":
list_of_indices_2012 = range(len(all_img_n_2012_filenames[0]))
else:
raise ValueError("KITTI 2012: dstype '%s' unknown!", dstype)
# ----------------------------------------------------------
# Save list of actual filenames for inputs and flows
# ----------------------------------------------------------
self._image_list = []
self._flow_list = []
for ii in list_of_indices_2015:
imgs = []
for n in range(self._nframes):
imgs.append(all_img_n_2015_filenames[n][ii])
idx_0 = os.path.splitext(os.path.basename(imgs[0]))[0][:-3]
for n in range(self._nframes):
idx_n = os.path.splitext(os.path.basename(imgs[n]))[0][:-3]
assert idx_0 == idx_n
if np.any([not os.path.isfile(im) for im in imgs]):
continue
self._image_list += [imgs]
if dstype != "test":
flo_f = flow_f_2015_filenames[ii]
idx_f = os.path.splitext(os.path.basename(flo_f))[0][:-3]
assert idx_0 == idx_f
if not os.path.isfile(flo_f):
continue
self._flow_list += [[flo_f]]
for ii in list_of_indices_2012:
imgs = []
for n in range(self._nframes):
imgs.append(all_img_n_2012_filenames[n][ii])
idx_0 = os.path.splitext(os.path.basename(imgs[0]))[0][:-3]
for n in range(self._nframes):
idx_n = os.path.splitext(os.path.basename(imgs[n]))[0][:-3]
assert idx_0 == idx_n
if np.any([not os.path.isfile(im) for im in imgs]):
continue
self._image_list += [imgs]
if dstype != "test":
flo_f = flow_f_2012_filenames[ii]
idx_f = os.path.splitext(os.path.basename(flo_f))[0][:-3]
assert idx_0 == idx_f
if not os.path.isfile(flo_f):
continue
self._flow_list += [[flo_f]]
self._size = len(self._image_list)
assert len(self._image_list) != 0
if dstype != "test":
assert len(self._image_list) == len(self._flow_list)
# ----------------------------------------------------------
# photometric_augmentations
# ----------------------------------------------------------
if photometric_augmentations:
self._photometric_transform = transforms.ConcatTransformSplitChainer([
# uint8 -> PIL
vision_transforms.ToPILImage(),
# PIL -> PIL : random hsv and contrast
vision_transforms.ColorJitter(brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5),
# PIL -> FloatTensor
vision_transforms.transforms.ToTensor(),
transforms.RandomGamma(min_gamma=0.7, max_gamma=1.5, clip_image=True),
], from_numpy=True, to_numpy=False)
else:
self._photometric_transform = transforms.ConcatTransformSplitChainer([
# uint8 -> FloatTensor
vision_transforms.transforms.ToTensor(),
], from_numpy=True, to_numpy=False)
def __getitem__(self, index):
index = index % self._size
imgs_filenames = self._image_list[index]
flo_f_filename = self._flow_list[index][0]
# read uint8 images plus float64 flow and its validity mask
imgs_np0 = [common.read_image_as_byte(filename) for filename in imgs_filenames]
flo_f_np0, valid_mask = read_png_flow(flo_f_filename)
if self.preprocessing_crop:
imgs_np0, flo_f_np0, valid_mask = kitti_random_crop_multi(imgs_np0, flo_f_np0, valid_mask)
# possibly apply photometric transformations
imgs = self._photometric_transform(*imgs_np0)
# convert flow to FloatTensor
flo_f = common.numpy2torch(flo_f_np0)
valid_mask_f = common.numpy2torch(valid_mask)
# example filename
basenames = [os.path.basename(f)[:9] for f in imgs_filenames]
example_dict = {
"input1": imgs[0],
"input_images": imgs,
"target1": flo_f,
"index": index,
"basename": basenames,
"nframes":self._nframes,
"input_valid": valid_mask_f
}
return example_dict
def __len__(self):
return self._size
class KittiMultiframeCombTrain(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=True,
preprocessing_crop=True):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "image_2")
flow_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "flow_occ")
images_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "colored_0")
flow_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "flow_occ")
super(KittiMultiframeCombTrain, self).__init__(
args,
images_root_2015=images_root_2015,
flow_root_2015=flow_root_2015,
images_root_2012=images_root_2012,
flow_root_2012=flow_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="train")
class KittiMultiframeCombVal(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=False):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "image_2")
flow_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "flow_occ")
images_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "colored_0")
flow_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "flow_occ")
super(KittiMultiframeCombVal, self).__init__(
args,
images_root_2015=images_root_2015,
flow_root_2015=flow_root_2015,
images_root_2012=images_root_2012,
flow_root_2012=flow_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="valid")
class KittiMultiframeCombFull(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=True,
preprocessing_crop=True):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "image_2")
flow_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "flow_occ")
images_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "colored_0")
flow_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "flow_occ")
super(KittiMultiframeCombFull, self).__init__(
args,
images_root_2015=images_root_2015,
flow_root_2015=flow_root_2015,
images_root_2012=images_root_2012,
flow_root_2012=flow_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="full")
class KittiMultiframeComb2015Train(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=True,
preprocessing_crop=True):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "image_2")
flow_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "flow_occ")
super(KittiMultiframeComb2015Train, self).__init__(
args,
images_root_2015=images_root_2015,
flow_root_2015=flow_root_2015,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="train")
class KittiMultiframeComb2015Val(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=False):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "image_2")
flow_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "flow_occ")
super(KittiMultiframeComb2015Val, self).__init__(
args,
images_root_2015=images_root_2015,
flow_root_2015=flow_root_2015,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="valid")
class KittiMultiframeComb2015Full(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=True,
preprocessing_crop=True):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "image_2")
flow_root_2015 = os.path.join(root, "kitti_flow_2015", "training", "flow_occ")
super(KittiMultiframeComb2015Full, self).__init__(
args,
images_root_2015=images_root_2015,
flow_root_2015=flow_root_2015,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="full")
class KittiMultiframeComb2015Test(KittiMultiframe_comb_test):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=False):
images_root_2015 = os.path.join(root, "kitti_flow_2015", "testing", "image_2")
super(KittiMultiframeComb2015Test, self).__init__(
args,
images_root_2015=images_root_2015,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop)
class KittiMultiframeComb2012Train(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=True,
preprocessing_crop=True):
images_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "colored_0")
flow_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "flow_occ")
super(KittiMultiframeComb2012Train, self).__init__(
args,
images_root_2012=images_root_2012,
flow_root_2012=flow_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="train")
class KittiMultiframeComb2012Val(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=False):
images_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "colored_0")
flow_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "flow_occ")
super(KittiMultiframeComb2012Val, self).__init__(
args,
images_root_2012=images_root_2012,
flow_root_2012=flow_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="valid")
class KittiMultiframeComb2012Full(KittiMultiframe_comb):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=True,
preprocessing_crop=True):
images_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "colored_0")
flow_root_2012 = os.path.join(root, "kitti_flow_2012", "training", "flow_occ")
super(KittiMultiframeComb2012Full, self).__init__(
args,
images_root_2012=images_root_2012,
flow_root_2012=flow_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop,
dstype="full")
class KittiMultiframeComb2012Test(KittiMultiframe_comb_test):
def __init__(self,
args,
root,
nframes=5,
photometric_augmentations=False,
preprocessing_crop=False):
images_root_2012 = os.path.join(root, "kitti_flow_2012", "testing", "colored_0")
super(KittiMultiframeComb2012Test, self).__init__(
args,
images_root_2012=images_root_2012,
nframes=nframes,
photometric_augmentations=photometric_augmentations,
preprocessing_crop=preprocessing_crop)
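# Usage sketch (assumes the standard KITTI flow layout under `root`, i.e.
# kitti_flow_2015/training/{image_2,flow_occ} and
# kitti_flow_2012/training/{colored_0,flow_occ}; `args` is whatever namespace
# the surrounding training code normally passes in):
#
# import torch.utils.data
# train_set = KittiMultiframeCombTrain(args=None, root='/data/kitti', nframes=5)
# loader = torch.utils.data.DataLoader(train_set, batch_size=4, shuffle=True)
# batch = next(iter(loader))
# batch["input_images"], batch["target1"], batch["input_valid"]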
| 40.11838
| 198
| 0.573575
| 2,980
| 25,756
| 4.615436
| 0.088591
| 0.033154
| 0.023266
| 0.026465
| 0.863167
| 0.845281
| 0.818598
| 0.785735
| 0.773811
| 0.757452
| 0
| 0.072443
| 0.294689
| 25,756
| 642
| 199
| 40.11838
| 0.684686
| 0.077885
| 0
| 0.778481
| 0
| 0
| 0.054864
| 0
| 0
| 0
| 0
| 0
| 0.040084
| 1
| 0.042194
| false
| 0
| 0.018987
| 0.004219
| 0.103376
| 0.00211
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
143c9959e801579a7f8ac814e9602619660904f1
| 122
|
py
|
Python
|
emat/workbench/util/__init__.py
|
jinsanity07git/tmip-emat
|
ff816cf50f141825078bb276d6da46d92c5028a9
|
[
"BSD-3-Clause"
] | 75
|
2015-01-14T20:39:14.000Z
|
2022-03-31T09:28:15.000Z
|
emat/workbench/util/__init__.py
|
jinsanity07git/tmip-emat
|
ff816cf50f141825078bb276d6da46d92c5028a9
|
[
"BSD-3-Clause"
] | 92
|
2015-01-15T16:12:38.000Z
|
2022-03-23T20:46:37.000Z
|
emat/workbench/util/__init__.py
|
jinsanity07git/tmip-emat
|
ff816cf50f141825078bb276d6da46d92c5028a9
|
[
"BSD-3-Clause"
] | 64
|
2015-02-16T15:07:12.000Z
|
2022-03-23T16:17:16.000Z
|
from __future__ import absolute_import
from .ema_exceptions import *
from .ema_logging import *
from .utilities import *
| 20.333333
| 38
| 0.811475
| 16
| 122
| 5.75
| 0.5
| 0.326087
| 0.282609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139344
| 122
| 5
| 39
| 24.4
| 0.87619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
144853db164eb9ed946e108694256353be07f071
| 47
|
py
|
Python
|
prla/assignments/a0/sum_first.py
|
AegirAexx/python-sandbox
|
fa1f584f615c6ed04f80b9dd92d2b241248c9ebe
|
[
"Unlicense"
] | null | null | null |
prla/assignments/a0/sum_first.py
|
AegirAexx/python-sandbox
|
fa1f584f615c6ed04f80b9dd92d2b241248c9ebe
|
[
"Unlicense"
] | null | null | null |
prla/assignments/a0/sum_first.py
|
AegirAexx/python-sandbox
|
fa1f584f615c6ed04f80b9dd92d2b241248c9ebe
|
[
"Unlicense"
] | null | null | null |
def sum_first(lis, i):
return sum(lis[:i])
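# Example: sum_first([1, 2, 3, 4], 2) sums the first two elements -> 3.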
| 15.666667
| 23
| 0.617021
| 9
| 47
| 3.111111
| 0.666667
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191489
| 47
| 2
| 24
| 23.5
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
146d3ac5921b62738c602c4c6fe3564d9ebc1835
| 177,609
|
py
|
Python
|
grammars-v4/generated/PlSqlParserListener.py
|
coder-chenzhi/priscilla
|
38acb923a41f1478b1c9aa51175ff2829d9f6f2a
|
[
"MIT"
] | null | null | null |
grammars-v4/generated/PlSqlParserListener.py
|
coder-chenzhi/priscilla
|
38acb923a41f1478b1c9aa51175ff2829d9f6f2a
|
[
"MIT"
] | null | null | null |
grammars-v4/generated/PlSqlParserListener.py
|
coder-chenzhi/priscilla
|
38acb923a41f1478b1c9aa51175ff2829d9f6f2a
|
[
"MIT"
] | null | null | null |
# Generated from grammars-v4/plsql/PlSqlParser.g4 by ANTLR 4.7.1
from antlr4 import *
if __name__ is not None and "." in __name__:
from .PlSqlParser import PlSqlParser
else:
from PlSqlParser import PlSqlParser
# This class defines a complete listener for a parse tree produced by PlSqlParser.
class PlSqlParserListener(ParseTreeListener):
# Enter a parse tree produced by PlSqlParser#sql_script.
def enterSql_script(self, ctx:PlSqlParser.Sql_scriptContext):
pass
# Exit a parse tree produced by PlSqlParser#sql_script.
def exitSql_script(self, ctx:PlSqlParser.Sql_scriptContext):
pass
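# Usage sketch (not part of the generated file): subclass the listener,
# override only the hooks you need, and walk a parse tree with it. This
# assumes the matching PlSqlLexer generated from the same grammar:
#
# from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
# from PlSqlLexer import PlSqlLexer
#
# class FunctionFinder(PlSqlParserListener):
#     def enterCreate_function_body(self, ctx):
#         print("found a function definition")
#
# parser = PlSqlParser(CommonTokenStream(PlSqlLexer(InputStream(sql_text))))
# ParseTreeWalker().walk(FunctionFinder(), parser.sql_script())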
# Enter a parse tree produced by PlSqlParser#unit_statement.
def enterUnit_statement(self, ctx:PlSqlParser.Unit_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#unit_statement.
def exitUnit_statement(self, ctx:PlSqlParser.Unit_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_function.
def enterDrop_function(self, ctx:PlSqlParser.Drop_functionContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_function.
def exitDrop_function(self, ctx:PlSqlParser.Drop_functionContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_function.
def enterAlter_function(self, ctx:PlSqlParser.Alter_functionContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_function.
def exitAlter_function(self, ctx:PlSqlParser.Alter_functionContext):
pass
# Enter a parse tree produced by PlSqlParser#create_function_body.
def enterCreate_function_body(self, ctx:PlSqlParser.Create_function_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#create_function_body.
def exitCreate_function_body(self, ctx:PlSqlParser.Create_function_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#parallel_enable_clause.
def enterParallel_enable_clause(self, ctx:PlSqlParser.Parallel_enable_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#parallel_enable_clause.
def exitParallel_enable_clause(self, ctx:PlSqlParser.Parallel_enable_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#partition_by_clause.
def enterPartition_by_clause(self, ctx:PlSqlParser.Partition_by_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#partition_by_clause.
def exitPartition_by_clause(self, ctx:PlSqlParser.Partition_by_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#result_cache_clause.
def enterResult_cache_clause(self, ctx:PlSqlParser.Result_cache_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#result_cache_clause.
def exitResult_cache_clause(self, ctx:PlSqlParser.Result_cache_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#relies_on_part.
def enterRelies_on_part(self, ctx:PlSqlParser.Relies_on_partContext):
pass
# Exit a parse tree produced by PlSqlParser#relies_on_part.
def exitRelies_on_part(self, ctx:PlSqlParser.Relies_on_partContext):
pass
# Enter a parse tree produced by PlSqlParser#streaming_clause.
def enterStreaming_clause(self, ctx:PlSqlParser.Streaming_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#streaming_clause.
def exitStreaming_clause(self, ctx:PlSqlParser.Streaming_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_package.
def enterDrop_package(self, ctx:PlSqlParser.Drop_packageContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_package.
def exitDrop_package(self, ctx:PlSqlParser.Drop_packageContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_package.
def enterAlter_package(self, ctx:PlSqlParser.Alter_packageContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_package.
def exitAlter_package(self, ctx:PlSqlParser.Alter_packageContext):
pass
# Enter a parse tree produced by PlSqlParser#create_package.
def enterCreate_package(self, ctx:PlSqlParser.Create_packageContext):
pass
# Exit a parse tree produced by PlSqlParser#create_package.
def exitCreate_package(self, ctx:PlSqlParser.Create_packageContext):
pass
# Enter a parse tree produced by PlSqlParser#create_package_body.
def enterCreate_package_body(self, ctx:PlSqlParser.Create_package_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#create_package_body.
def exitCreate_package_body(self, ctx:PlSqlParser.Create_package_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#package_obj_spec.
def enterPackage_obj_spec(self, ctx:PlSqlParser.Package_obj_specContext):
pass
# Exit a parse tree produced by PlSqlParser#package_obj_spec.
def exitPackage_obj_spec(self, ctx:PlSqlParser.Package_obj_specContext):
pass
# Enter a parse tree produced by PlSqlParser#procedure_spec.
def enterProcedure_spec(self, ctx:PlSqlParser.Procedure_specContext):
pass
# Exit a parse tree produced by PlSqlParser#procedure_spec.
def exitProcedure_spec(self, ctx:PlSqlParser.Procedure_specContext):
pass
# Enter a parse tree produced by PlSqlParser#function_spec.
def enterFunction_spec(self, ctx:PlSqlParser.Function_specContext):
pass
# Exit a parse tree produced by PlSqlParser#function_spec.
def exitFunction_spec(self, ctx:PlSqlParser.Function_specContext):
pass
# Enter a parse tree produced by PlSqlParser#package_obj_body.
def enterPackage_obj_body(self, ctx:PlSqlParser.Package_obj_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#package_obj_body.
def exitPackage_obj_body(self, ctx:PlSqlParser.Package_obj_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_procedure.
def enterDrop_procedure(self, ctx:PlSqlParser.Drop_procedureContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_procedure.
def exitDrop_procedure(self, ctx:PlSqlParser.Drop_procedureContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_procedure.
def enterAlter_procedure(self, ctx:PlSqlParser.Alter_procedureContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_procedure.
def exitAlter_procedure(self, ctx:PlSqlParser.Alter_procedureContext):
pass
# Enter a parse tree produced by PlSqlParser#function_body.
def enterFunction_body(self, ctx:PlSqlParser.Function_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#function_body.
def exitFunction_body(self, ctx:PlSqlParser.Function_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#procedure_body.
def enterProcedure_body(self, ctx:PlSqlParser.Procedure_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#procedure_body.
def exitProcedure_body(self, ctx:PlSqlParser.Procedure_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#create_procedure_body.
def enterCreate_procedure_body(self, ctx:PlSqlParser.Create_procedure_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#create_procedure_body.
def exitCreate_procedure_body(self, ctx:PlSqlParser.Create_procedure_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_trigger.
def enterDrop_trigger(self, ctx:PlSqlParser.Drop_triggerContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_trigger.
def exitDrop_trigger(self, ctx:PlSqlParser.Drop_triggerContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_trigger.
def enterAlter_trigger(self, ctx:PlSqlParser.Alter_triggerContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_trigger.
def exitAlter_trigger(self, ctx:PlSqlParser.Alter_triggerContext):
pass
# Enter a parse tree produced by PlSqlParser#create_trigger.
def enterCreate_trigger(self, ctx:PlSqlParser.Create_triggerContext):
pass
# Exit a parse tree produced by PlSqlParser#create_trigger.
def exitCreate_trigger(self, ctx:PlSqlParser.Create_triggerContext):
pass
# Enter a parse tree produced by PlSqlParser#trigger_follows_clause.
def enterTrigger_follows_clause(self, ctx:PlSqlParser.Trigger_follows_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#trigger_follows_clause.
def exitTrigger_follows_clause(self, ctx:PlSqlParser.Trigger_follows_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#trigger_when_clause.
def enterTrigger_when_clause(self, ctx:PlSqlParser.Trigger_when_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#trigger_when_clause.
def exitTrigger_when_clause(self, ctx:PlSqlParser.Trigger_when_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#simple_dml_trigger.
def enterSimple_dml_trigger(self, ctx:PlSqlParser.Simple_dml_triggerContext):
pass
# Exit a parse tree produced by PlSqlParser#simple_dml_trigger.
def exitSimple_dml_trigger(self, ctx:PlSqlParser.Simple_dml_triggerContext):
pass
# Enter a parse tree produced by PlSqlParser#for_each_row.
def enterFor_each_row(self, ctx:PlSqlParser.For_each_rowContext):
pass
# Exit a parse tree produced by PlSqlParser#for_each_row.
def exitFor_each_row(self, ctx:PlSqlParser.For_each_rowContext):
pass
# Enter a parse tree produced by PlSqlParser#compound_dml_trigger.
def enterCompound_dml_trigger(self, ctx:PlSqlParser.Compound_dml_triggerContext):
pass
# Exit a parse tree produced by PlSqlParser#compound_dml_trigger.
def exitCompound_dml_trigger(self, ctx:PlSqlParser.Compound_dml_triggerContext):
pass
# Enter a parse tree produced by PlSqlParser#non_dml_trigger.
def enterNon_dml_trigger(self, ctx:PlSqlParser.Non_dml_triggerContext):
pass
# Exit a parse tree produced by PlSqlParser#non_dml_trigger.
def exitNon_dml_trigger(self, ctx:PlSqlParser.Non_dml_triggerContext):
pass
# Enter a parse tree produced by PlSqlParser#trigger_body.
def enterTrigger_body(self, ctx:PlSqlParser.Trigger_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#trigger_body.
def exitTrigger_body(self, ctx:PlSqlParser.Trigger_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#routine_clause.
def enterRoutine_clause(self, ctx:PlSqlParser.Routine_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#routine_clause.
def exitRoutine_clause(self, ctx:PlSqlParser.Routine_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#compound_trigger_block.
def enterCompound_trigger_block(self, ctx:PlSqlParser.Compound_trigger_blockContext):
pass
# Exit a parse tree produced by PlSqlParser#compound_trigger_block.
def exitCompound_trigger_block(self, ctx:PlSqlParser.Compound_trigger_blockContext):
pass
# Enter a parse tree produced by PlSqlParser#timing_point_section.
def enterTiming_point_section(self, ctx:PlSqlParser.Timing_point_sectionContext):
pass
# Exit a parse tree produced by PlSqlParser#timing_point_section.
def exitTiming_point_section(self, ctx:PlSqlParser.Timing_point_sectionContext):
pass
# Enter a parse tree produced by PlSqlParser#non_dml_event.
def enterNon_dml_event(self, ctx:PlSqlParser.Non_dml_eventContext):
pass
# Exit a parse tree produced by PlSqlParser#non_dml_event.
def exitNon_dml_event(self, ctx:PlSqlParser.Non_dml_eventContext):
pass
# Enter a parse tree produced by PlSqlParser#dml_event_clause.
def enterDml_event_clause(self, ctx:PlSqlParser.Dml_event_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#dml_event_clause.
def exitDml_event_clause(self, ctx:PlSqlParser.Dml_event_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#dml_event_element.
def enterDml_event_element(self, ctx:PlSqlParser.Dml_event_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#dml_event_element.
def exitDml_event_element(self, ctx:PlSqlParser.Dml_event_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#dml_event_nested_clause.
def enterDml_event_nested_clause(self, ctx:PlSqlParser.Dml_event_nested_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#dml_event_nested_clause.
def exitDml_event_nested_clause(self, ctx:PlSqlParser.Dml_event_nested_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#referencing_clause.
def enterReferencing_clause(self, ctx:PlSqlParser.Referencing_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#referencing_clause.
def exitReferencing_clause(self, ctx:PlSqlParser.Referencing_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#referencing_element.
def enterReferencing_element(self, ctx:PlSqlParser.Referencing_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#referencing_element.
def exitReferencing_element(self, ctx:PlSqlParser.Referencing_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_type.
def enterDrop_type(self, ctx:PlSqlParser.Drop_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_type.
def exitDrop_type(self, ctx:PlSqlParser.Drop_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_type.
def enterAlter_type(self, ctx:PlSqlParser.Alter_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_type.
def exitAlter_type(self, ctx:PlSqlParser.Alter_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#compile_type_clause.
def enterCompile_type_clause(self, ctx:PlSqlParser.Compile_type_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#compile_type_clause.
def exitCompile_type_clause(self, ctx:PlSqlParser.Compile_type_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#replace_type_clause.
def enterReplace_type_clause(self, ctx:PlSqlParser.Replace_type_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#replace_type_clause.
def exitReplace_type_clause(self, ctx:PlSqlParser.Replace_type_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_method_spec.
def enterAlter_method_spec(self, ctx:PlSqlParser.Alter_method_specContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_method_spec.
def exitAlter_method_spec(self, ctx:PlSqlParser.Alter_method_specContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_method_element.
def enterAlter_method_element(self, ctx:PlSqlParser.Alter_method_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_method_element.
def exitAlter_method_element(self, ctx:PlSqlParser.Alter_method_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_attribute_definition.
def enterAlter_attribute_definition(self, ctx:PlSqlParser.Alter_attribute_definitionContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_attribute_definition.
def exitAlter_attribute_definition(self, ctx:PlSqlParser.Alter_attribute_definitionContext):
pass
# Enter a parse tree produced by PlSqlParser#attribute_definition.
def enterAttribute_definition(self, ctx:PlSqlParser.Attribute_definitionContext):
pass
# Exit a parse tree produced by PlSqlParser#attribute_definition.
def exitAttribute_definition(self, ctx:PlSqlParser.Attribute_definitionContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_collection_clauses.
def enterAlter_collection_clauses(self, ctx:PlSqlParser.Alter_collection_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_collection_clauses.
def exitAlter_collection_clauses(self, ctx:PlSqlParser.Alter_collection_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#dependent_handling_clause.
def enterDependent_handling_clause(self, ctx:PlSqlParser.Dependent_handling_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#dependent_handling_clause.
def exitDependent_handling_clause(self, ctx:PlSqlParser.Dependent_handling_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#dependent_exceptions_part.
def enterDependent_exceptions_part(self, ctx:PlSqlParser.Dependent_exceptions_partContext):
pass
# Exit a parse tree produced by PlSqlParser#dependent_exceptions_part.
def exitDependent_exceptions_part(self, ctx:PlSqlParser.Dependent_exceptions_partContext):
pass
# Enter a parse tree produced by PlSqlParser#create_type.
def enterCreate_type(self, ctx:PlSqlParser.Create_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#create_type.
def exitCreate_type(self, ctx:PlSqlParser.Create_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#type_definition.
def enterType_definition(self, ctx:PlSqlParser.Type_definitionContext):
pass
# Exit a parse tree produced by PlSqlParser#type_definition.
def exitType_definition(self, ctx:PlSqlParser.Type_definitionContext):
pass
# Enter a parse tree produced by PlSqlParser#object_type_def.
def enterObject_type_def(self, ctx:PlSqlParser.Object_type_defContext):
pass
# Exit a parse tree produced by PlSqlParser#object_type_def.
def exitObject_type_def(self, ctx:PlSqlParser.Object_type_defContext):
pass
# Enter a parse tree produced by PlSqlParser#object_as_part.
def enterObject_as_part(self, ctx:PlSqlParser.Object_as_partContext):
pass
# Exit a parse tree produced by PlSqlParser#object_as_part.
def exitObject_as_part(self, ctx:PlSqlParser.Object_as_partContext):
pass
# Enter a parse tree produced by PlSqlParser#object_under_part.
def enterObject_under_part(self, ctx:PlSqlParser.Object_under_partContext):
pass
# Exit a parse tree produced by PlSqlParser#object_under_part.
def exitObject_under_part(self, ctx:PlSqlParser.Object_under_partContext):
pass
# Enter a parse tree produced by PlSqlParser#nested_table_type_def.
def enterNested_table_type_def(self, ctx:PlSqlParser.Nested_table_type_defContext):
pass
# Exit a parse tree produced by PlSqlParser#nested_table_type_def.
def exitNested_table_type_def(self, ctx:PlSqlParser.Nested_table_type_defContext):
pass
# Enter a parse tree produced by PlSqlParser#sqlj_object_type.
def enterSqlj_object_type(self, ctx:PlSqlParser.Sqlj_object_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#sqlj_object_type.
def exitSqlj_object_type(self, ctx:PlSqlParser.Sqlj_object_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#type_body.
def enterType_body(self, ctx:PlSqlParser.Type_bodyContext):
pass
# Exit a parse tree produced by PlSqlParser#type_body.
def exitType_body(self, ctx:PlSqlParser.Type_bodyContext):
pass
# Enter a parse tree produced by PlSqlParser#type_body_elements.
def enterType_body_elements(self, ctx:PlSqlParser.Type_body_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#type_body_elements.
def exitType_body_elements(self, ctx:PlSqlParser.Type_body_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#map_order_func_declaration.
def enterMap_order_func_declaration(self, ctx:PlSqlParser.Map_order_func_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#map_order_func_declaration.
def exitMap_order_func_declaration(self, ctx:PlSqlParser.Map_order_func_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#subprog_decl_in_type.
def enterSubprog_decl_in_type(self, ctx:PlSqlParser.Subprog_decl_in_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#subprog_decl_in_type.
def exitSubprog_decl_in_type(self, ctx:PlSqlParser.Subprog_decl_in_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#proc_decl_in_type.
def enterProc_decl_in_type(self, ctx:PlSqlParser.Proc_decl_in_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#proc_decl_in_type.
def exitProc_decl_in_type(self, ctx:PlSqlParser.Proc_decl_in_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#func_decl_in_type.
def enterFunc_decl_in_type(self, ctx:PlSqlParser.Func_decl_in_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#func_decl_in_type.
def exitFunc_decl_in_type(self, ctx:PlSqlParser.Func_decl_in_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#constructor_declaration.
def enterConstructor_declaration(self, ctx:PlSqlParser.Constructor_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#constructor_declaration.
def exitConstructor_declaration(self, ctx:PlSqlParser.Constructor_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#modifier_clause.
def enterModifier_clause(self, ctx:PlSqlParser.Modifier_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#modifier_clause.
def exitModifier_clause(self, ctx:PlSqlParser.Modifier_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#object_member_spec.
def enterObject_member_spec(self, ctx:PlSqlParser.Object_member_specContext):
pass
# Exit a parse tree produced by PlSqlParser#object_member_spec.
def exitObject_member_spec(self, ctx:PlSqlParser.Object_member_specContext):
pass
# Enter a parse tree produced by PlSqlParser#sqlj_object_type_attr.
def enterSqlj_object_type_attr(self, ctx:PlSqlParser.Sqlj_object_type_attrContext):
pass
# Exit a parse tree produced by PlSqlParser#sqlj_object_type_attr.
def exitSqlj_object_type_attr(self, ctx:PlSqlParser.Sqlj_object_type_attrContext):
pass
# Enter a parse tree produced by PlSqlParser#element_spec.
def enterElement_spec(self, ctx:PlSqlParser.Element_specContext):
pass
# Exit a parse tree produced by PlSqlParser#element_spec.
def exitElement_spec(self, ctx:PlSqlParser.Element_specContext):
pass
# Enter a parse tree produced by PlSqlParser#element_spec_options.
def enterElement_spec_options(self, ctx:PlSqlParser.Element_spec_optionsContext):
pass
# Exit a parse tree produced by PlSqlParser#element_spec_options.
def exitElement_spec_options(self, ctx:PlSqlParser.Element_spec_optionsContext):
pass
# Enter a parse tree produced by PlSqlParser#subprogram_spec.
def enterSubprogram_spec(self, ctx:PlSqlParser.Subprogram_specContext):
pass
# Exit a parse tree produced by PlSqlParser#subprogram_spec.
def exitSubprogram_spec(self, ctx:PlSqlParser.Subprogram_specContext):
pass
# Enter a parse tree produced by PlSqlParser#type_procedure_spec.
def enterType_procedure_spec(self, ctx:PlSqlParser.Type_procedure_specContext):
pass
# Exit a parse tree produced by PlSqlParser#type_procedure_spec.
def exitType_procedure_spec(self, ctx:PlSqlParser.Type_procedure_specContext):
pass
# Enter a parse tree produced by PlSqlParser#type_function_spec.
def enterType_function_spec(self, ctx:PlSqlParser.Type_function_specContext):
pass
# Exit a parse tree produced by PlSqlParser#type_function_spec.
def exitType_function_spec(self, ctx:PlSqlParser.Type_function_specContext):
pass
# Enter a parse tree produced by PlSqlParser#constructor_spec.
def enterConstructor_spec(self, ctx:PlSqlParser.Constructor_specContext):
pass
# Exit a parse tree produced by PlSqlParser#constructor_spec.
def exitConstructor_spec(self, ctx:PlSqlParser.Constructor_specContext):
pass
# Enter a parse tree produced by PlSqlParser#map_order_function_spec.
def enterMap_order_function_spec(self, ctx:PlSqlParser.Map_order_function_specContext):
pass
# Exit a parse tree produced by PlSqlParser#map_order_function_spec.
def exitMap_order_function_spec(self, ctx:PlSqlParser.Map_order_function_specContext):
pass
# Enter a parse tree produced by PlSqlParser#pragma_clause.
def enterPragma_clause(self, ctx:PlSqlParser.Pragma_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#pragma_clause.
def exitPragma_clause(self, ctx:PlSqlParser.Pragma_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#pragma_elements.
def enterPragma_elements(self, ctx:PlSqlParser.Pragma_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#pragma_elements.
def exitPragma_elements(self, ctx:PlSqlParser.Pragma_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#type_elements_parameter.
def enterType_elements_parameter(self, ctx:PlSqlParser.Type_elements_parameterContext):
pass
# Exit a parse tree produced by PlSqlParser#type_elements_parameter.
def exitType_elements_parameter(self, ctx:PlSqlParser.Type_elements_parameterContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_sequence.
def enterDrop_sequence(self, ctx:PlSqlParser.Drop_sequenceContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_sequence.
def exitDrop_sequence(self, ctx:PlSqlParser.Drop_sequenceContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_sequence.
def enterAlter_sequence(self, ctx:PlSqlParser.Alter_sequenceContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_sequence.
def exitAlter_sequence(self, ctx:PlSqlParser.Alter_sequenceContext):
pass
# Enter a parse tree produced by PlSqlParser#create_sequence.
def enterCreate_sequence(self, ctx:PlSqlParser.Create_sequenceContext):
pass
# Exit a parse tree produced by PlSqlParser#create_sequence.
def exitCreate_sequence(self, ctx:PlSqlParser.Create_sequenceContext):
pass
# Enter a parse tree produced by PlSqlParser#sequence_spec.
def enterSequence_spec(self, ctx:PlSqlParser.Sequence_specContext):
pass
# Exit a parse tree produced by PlSqlParser#sequence_spec.
def exitSequence_spec(self, ctx:PlSqlParser.Sequence_specContext):
pass
# Enter a parse tree produced by PlSqlParser#sequence_start_clause.
def enterSequence_start_clause(self, ctx:PlSqlParser.Sequence_start_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#sequence_start_clause.
def exitSequence_start_clause(self, ctx:PlSqlParser.Sequence_start_clauseContext):
pass
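# Sketch (same assumptions as the walker example above): a subclass can
# recover sequence DDL as plain text, since every context exposes
# getText() over its matched tokens.
#
#   class SequenceLogger(PlSqlParserListener):
#       def enterCreate_sequence(self, ctx:PlSqlParser.Create_sequenceContext):
#           # getText() concatenates the rule's tokens; whitespace sits
#           # on the hidden channel, so it is omitted from the result.
#           print("sequence DDL:", ctx.getText())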
# Enter a parse tree produced by PlSqlParser#create_index.
def enterCreate_index(self, ctx:PlSqlParser.Create_indexContext):
pass
# Exit a parse tree produced by PlSqlParser#create_index.
def exitCreate_index(self, ctx:PlSqlParser.Create_indexContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_index.
def enterAlter_index(self, ctx:PlSqlParser.Alter_indexContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_index.
def exitAlter_index(self, ctx:PlSqlParser.Alter_indexContext):
pass
# Enter a parse tree produced by PlSqlParser#create_user.
def enterCreate_user(self, ctx:PlSqlParser.Create_userContext):
pass
# Exit a parse tree produced by PlSqlParser#create_user.
def exitCreate_user(self, ctx:PlSqlParser.Create_userContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_user.
def enterAlter_user(self, ctx:PlSqlParser.Alter_userContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_user.
def exitAlter_user(self, ctx:PlSqlParser.Alter_userContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_identified_by.
def enterAlter_identified_by(self, ctx:PlSqlParser.Alter_identified_byContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_identified_by.
def exitAlter_identified_by(self, ctx:PlSqlParser.Alter_identified_byContext):
pass
# Enter a parse tree produced by PlSqlParser#identified_by.
def enterIdentified_by(self, ctx:PlSqlParser.Identified_byContext):
pass
# Exit a parse tree produced by PlSqlParser#identified_by.
def exitIdentified_by(self, ctx:PlSqlParser.Identified_byContext):
pass
# Enter a parse tree produced by PlSqlParser#identified_other_clause.
def enterIdentified_other_clause(self, ctx:PlSqlParser.Identified_other_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#identified_other_clause.
def exitIdentified_other_clause(self, ctx:PlSqlParser.Identified_other_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#user_tablespace_clause.
def enterUser_tablespace_clause(self, ctx:PlSqlParser.User_tablespace_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#user_tablespace_clause.
def exitUser_tablespace_clause(self, ctx:PlSqlParser.User_tablespace_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#quota_clause.
def enterQuota_clause(self, ctx:PlSqlParser.Quota_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#quota_clause.
def exitQuota_clause(self, ctx:PlSqlParser.Quota_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#profile_clause.
def enterProfile_clause(self, ctx:PlSqlParser.Profile_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#profile_clause.
def exitProfile_clause(self, ctx:PlSqlParser.Profile_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#role_clause.
def enterRole_clause(self, ctx:PlSqlParser.Role_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#role_clause.
def exitRole_clause(self, ctx:PlSqlParser.Role_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#user_default_role_clause.
def enterUser_default_role_clause(self, ctx:PlSqlParser.User_default_role_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#user_default_role_clause.
def exitUser_default_role_clause(self, ctx:PlSqlParser.User_default_role_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#password_expire_clause.
def enterPassword_expire_clause(self, ctx:PlSqlParser.Password_expire_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#password_expire_clause.
def exitPassword_expire_clause(self, ctx:PlSqlParser.Password_expire_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#user_lock_clause.
def enterUser_lock_clause(self, ctx:PlSqlParser.User_lock_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#user_lock_clause.
def exitUser_lock_clause(self, ctx:PlSqlParser.User_lock_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#user_editions_clause.
def enterUser_editions_clause(self, ctx:PlSqlParser.User_editions_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#user_editions_clause.
def exitUser_editions_clause(self, ctx:PlSqlParser.User_editions_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_user_editions_clause.
def enterAlter_user_editions_clause(self, ctx:PlSqlParser.Alter_user_editions_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_user_editions_clause.
def exitAlter_user_editions_clause(self, ctx:PlSqlParser.Alter_user_editions_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#proxy_clause.
def enterProxy_clause(self, ctx:PlSqlParser.Proxy_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#proxy_clause.
def exitProxy_clause(self, ctx:PlSqlParser.Proxy_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#container_names.
def enterContainer_names(self, ctx:PlSqlParser.Container_namesContext):
pass
# Exit a parse tree produced by PlSqlParser#container_names.
def exitContainer_names(self, ctx:PlSqlParser.Container_namesContext):
pass
# Enter a parse tree produced by PlSqlParser#set_container_data.
def enterSet_container_data(self, ctx:PlSqlParser.Set_container_dataContext):
pass
# Exit a parse tree produced by PlSqlParser#set_container_data.
def exitSet_container_data(self, ctx:PlSqlParser.Set_container_dataContext):
pass
# Enter a parse tree produced by PlSqlParser#add_rem_container_data.
def enterAdd_rem_container_data(self, ctx:PlSqlParser.Add_rem_container_dataContext):
pass
# Exit a parse tree produced by PlSqlParser#add_rem_container_data.
def exitAdd_rem_container_data(self, ctx:PlSqlParser.Add_rem_container_dataContext):
pass
# Enter a parse tree produced by PlSqlParser#container_data_clause.
def enterContainer_data_clause(self, ctx:PlSqlParser.Container_data_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#container_data_clause.
def exitContainer_data_clause(self, ctx:PlSqlParser.Container_data_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_index.
def enterDrop_index(self, ctx:PlSqlParser.Drop_indexContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_index.
def exitDrop_index(self, ctx:PlSqlParser.Drop_indexContext):
pass
# Enter a parse tree produced by PlSqlParser#grant_statement.
def enterGrant_statement(self, ctx:PlSqlParser.Grant_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#grant_statement.
def exitGrant_statement(self, ctx:PlSqlParser.Grant_statementContext):
pass
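# Sketch: GRANT statements can be audited the same way. Here the exit
# hook is used instead of the enter hook, so all child clauses have
# already been visited when the statement is recorded.
#
#   class GrantAuditor(PlSqlParserListener):
#       def __init__(self):
#           self.grants = []
#       def exitGrant_statement(self, ctx:PlSqlParser.Grant_statementContext):
#           self.grants.append((ctx.start.line, ctx.getText()))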
# Enter a parse tree produced by PlSqlParser#container_clause.
def enterContainer_clause(self, ctx:PlSqlParser.Container_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#container_clause.
def exitContainer_clause(self, ctx:PlSqlParser.Container_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#create_view.
def enterCreate_view(self, ctx:PlSqlParser.Create_viewContext):
pass
# Exit a parse tree produced by PlSqlParser#create_view.
def exitCreate_view(self, ctx:PlSqlParser.Create_viewContext):
pass
# Enter a parse tree produced by PlSqlParser#view_options.
def enterView_options(self, ctx:PlSqlParser.View_optionsContext):
pass
# Exit a parse tree produced by PlSqlParser#view_options.
def exitView_options(self, ctx:PlSqlParser.View_optionsContext):
pass
# Enter a parse tree produced by PlSqlParser#view_alias_constraint.
def enterView_alias_constraint(self, ctx:PlSqlParser.View_alias_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#view_alias_constraint.
def exitView_alias_constraint(self, ctx:PlSqlParser.View_alias_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#object_view_clause.
def enterObject_view_clause(self, ctx:PlSqlParser.Object_view_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#object_view_clause.
def exitObject_view_clause(self, ctx:PlSqlParser.Object_view_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#inline_constraint.
def enterInline_constraint(self, ctx:PlSqlParser.Inline_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#inline_constraint.
def exitInline_constraint(self, ctx:PlSqlParser.Inline_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#inline_ref_constraint.
def enterInline_ref_constraint(self, ctx:PlSqlParser.Inline_ref_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#inline_ref_constraint.
def exitInline_ref_constraint(self, ctx:PlSqlParser.Inline_ref_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#out_of_line_ref_constraint.
def enterOut_of_line_ref_constraint(self, ctx:PlSqlParser.Out_of_line_ref_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#out_of_line_ref_constraint.
def exitOut_of_line_ref_constraint(self, ctx:PlSqlParser.Out_of_line_ref_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#out_of_line_constraint.
def enterOut_of_line_constraint(self, ctx:PlSqlParser.Out_of_line_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#out_of_line_constraint.
def exitOut_of_line_constraint(self, ctx:PlSqlParser.Out_of_line_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#constraint_state.
def enterConstraint_state(self, ctx:PlSqlParser.Constraint_stateContext):
pass
# Exit a parse tree produced by PlSqlParser#constraint_state.
def exitConstraint_state(self, ctx:PlSqlParser.Constraint_stateContext):
pass
# Enter a parse tree produced by PlSqlParser#create_tablespace.
def enterCreate_tablespace(self, ctx:PlSqlParser.Create_tablespaceContext):
pass
# Exit a parse tree produced by PlSqlParser#create_tablespace.
def exitCreate_tablespace(self, ctx:PlSqlParser.Create_tablespaceContext):
pass
# Enter a parse tree produced by PlSqlParser#permanent_tablespace_clause.
def enterPermanent_tablespace_clause(self, ctx:PlSqlParser.Permanent_tablespace_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#permanent_tablespace_clause.
def exitPermanent_tablespace_clause(self, ctx:PlSqlParser.Permanent_tablespace_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#tablespace_encryption_spec.
def enterTablespace_encryption_spec(self, ctx:PlSqlParser.Tablespace_encryption_specContext):
pass
# Exit a parse tree produced by PlSqlParser#tablespace_encryption_spec.
def exitTablespace_encryption_spec(self, ctx:PlSqlParser.Tablespace_encryption_specContext):
pass
# Enter a parse tree produced by PlSqlParser#logging_clause.
def enterLogging_clause(self, ctx:PlSqlParser.Logging_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#logging_clause.
def exitLogging_clause(self, ctx:PlSqlParser.Logging_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#extent_management_clause.
def enterExtent_management_clause(self, ctx:PlSqlParser.Extent_management_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#extent_management_clause.
def exitExtent_management_clause(self, ctx:PlSqlParser.Extent_management_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#segment_management_clause.
def enterSegment_management_clause(self, ctx:PlSqlParser.Segment_management_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#segment_management_clause.
def exitSegment_management_clause(self, ctx:PlSqlParser.Segment_management_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#flashback_mode_clause.
def enterFlashback_mode_clause(self, ctx:PlSqlParser.Flashback_mode_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#flashback_mode_clause.
def exitFlashback_mode_clause(self, ctx:PlSqlParser.Flashback_mode_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#temporary_tablespace_clause.
def enterTemporary_tablespace_clause(self, ctx:PlSqlParser.Temporary_tablespace_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#temporary_tablespace_clause.
def exitTemporary_tablespace_clause(self, ctx:PlSqlParser.Temporary_tablespace_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#tablespace_group_clause.
def enterTablespace_group_clause(self, ctx:PlSqlParser.Tablespace_group_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#tablespace_group_clause.
def exitTablespace_group_clause(self, ctx:PlSqlParser.Tablespace_group_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#undo_tablespace_clause.
def enterUndo_tablespace_clause(self, ctx:PlSqlParser.Undo_tablespace_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#undo_tablespace_clause.
def exitUndo_tablespace_clause(self, ctx:PlSqlParser.Undo_tablespace_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#tablespace_retention_clause.
def enterTablespace_retention_clause(self, ctx:PlSqlParser.Tablespace_retention_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#tablespace_retention_clause.
def exitTablespace_retention_clause(self, ctx:PlSqlParser.Tablespace_retention_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#datafile_specification.
def enterDatafile_specification(self, ctx:PlSqlParser.Datafile_specificationContext):
pass
# Exit a parse tree produced by PlSqlParser#datafile_specification.
def exitDatafile_specification(self, ctx:PlSqlParser.Datafile_specificationContext):
pass
# Enter a parse tree produced by PlSqlParser#tempfile_specification.
def enterTempfile_specification(self, ctx:PlSqlParser.Tempfile_specificationContext):
pass
# Exit a parse tree produced by PlSqlParser#tempfile_specification.
def exitTempfile_specification(self, ctx:PlSqlParser.Tempfile_specificationContext):
pass
# Enter a parse tree produced by PlSqlParser#datafile_tempfile_spec.
def enterDatafile_tempfile_spec(self, ctx:PlSqlParser.Datafile_tempfile_specContext):
pass
# Exit a parse tree produced by PlSqlParser#datafile_tempfile_spec.
def exitDatafile_tempfile_spec(self, ctx:PlSqlParser.Datafile_tempfile_specContext):
pass
# Enter a parse tree produced by PlSqlParser#redo_log_file_spec.
def enterRedo_log_file_spec(self, ctx:PlSqlParser.Redo_log_file_specContext):
pass
# Exit a parse tree produced by PlSqlParser#redo_log_file_spec.
def exitRedo_log_file_spec(self, ctx:PlSqlParser.Redo_log_file_specContext):
pass
# Enter a parse tree produced by PlSqlParser#autoextend_clause.
def enterAutoextend_clause(self, ctx:PlSqlParser.Autoextend_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#autoextend_clause.
def exitAutoextend_clause(self, ctx:PlSqlParser.Autoextend_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#maxsize_clause.
def enterMaxsize_clause(self, ctx:PlSqlParser.Maxsize_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#maxsize_clause.
def exitMaxsize_clause(self, ctx:PlSqlParser.Maxsize_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#build_clause.
def enterBuild_clause(self, ctx:PlSqlParser.Build_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#build_clause.
def exitBuild_clause(self, ctx:PlSqlParser.Build_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#parallel_clause.
def enterParallel_clause(self, ctx:PlSqlParser.Parallel_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#parallel_clause.
def exitParallel_clause(self, ctx:PlSqlParser.Parallel_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#create_materialized_view_log.
def enterCreate_materialized_view_log(self, ctx:PlSqlParser.Create_materialized_view_logContext):
pass
# Exit a parse tree produced by PlSqlParser#create_materialized_view_log.
def exitCreate_materialized_view_log(self, ctx:PlSqlParser.Create_materialized_view_logContext):
pass
# Enter a parse tree produced by PlSqlParser#new_values_clause.
def enterNew_values_clause(self, ctx:PlSqlParser.New_values_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#new_values_clause.
def exitNew_values_clause(self, ctx:PlSqlParser.New_values_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#mv_log_purge_clause.
def enterMv_log_purge_clause(self, ctx:PlSqlParser.Mv_log_purge_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#mv_log_purge_clause.
def exitMv_log_purge_clause(self, ctx:PlSqlParser.Mv_log_purge_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#create_materialized_view.
def enterCreate_materialized_view(self, ctx:PlSqlParser.Create_materialized_viewContext):
pass
# Exit a parse tree produced by PlSqlParser#create_materialized_view.
def exitCreate_materialized_view(self, ctx:PlSqlParser.Create_materialized_viewContext):
pass
# Enter a parse tree produced by PlSqlParser#create_mv_refresh.
def enterCreate_mv_refresh(self, ctx:PlSqlParser.Create_mv_refreshContext):
pass
# Exit a parse tree produced by PlSqlParser#create_mv_refresh.
def exitCreate_mv_refresh(self, ctx:PlSqlParser.Create_mv_refreshContext):
pass
# Enter a parse tree produced by PlSqlParser#create_table.
def enterCreate_table(self, ctx:PlSqlParser.Create_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#create_table.
def exitCreate_table(self, ctx:PlSqlParser.Create_tableContext):
pass
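# Sketch: the original source text of a CREATE TABLE, whitespace
# included, can be recovered from the tokens' character offsets. The
# variable sql_text below is hypothetical and stands for the string
# that was handed to the lexer.
#
#   class TableSource(PlSqlParserListener):
#       def enterCreate_table(self, ctx:PlSqlParser.Create_tableContext):
#           # start/stop are Tokens carrying absolute character offsets
#           # into the input; the stop offset is inclusive.
#           print(sql_text[ctx.start.start : ctx.stop.stop + 1])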
# Enter a parse tree produced by PlSqlParser#xmltype_table.
def enterXmltype_table(self, ctx:PlSqlParser.Xmltype_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#xmltype_table.
def exitXmltype_table(self, ctx:PlSqlParser.Xmltype_tableContext):
pass
# Enter a parse tree produced by PlSqlParser#xmltype_virtual_columns.
def enterXmltype_virtual_columns(self, ctx:PlSqlParser.Xmltype_virtual_columnsContext):
pass
# Exit a parse tree produced by PlSqlParser#xmltype_virtual_columns.
def exitXmltype_virtual_columns(self, ctx:PlSqlParser.Xmltype_virtual_columnsContext):
pass
# Enter a parse tree produced by PlSqlParser#xmltype_column_properties.
def enterXmltype_column_properties(self, ctx:PlSqlParser.Xmltype_column_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#xmltype_column_properties.
def exitXmltype_column_properties(self, ctx:PlSqlParser.Xmltype_column_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#xmltype_storage.
def enterXmltype_storage(self, ctx:PlSqlParser.Xmltype_storageContext):
pass
# Exit a parse tree produced by PlSqlParser#xmltype_storage.
def exitXmltype_storage(self, ctx:PlSqlParser.Xmltype_storageContext):
pass
# Enter a parse tree produced by PlSqlParser#xmlschema_spec.
def enterXmlschema_spec(self, ctx:PlSqlParser.Xmlschema_specContext):
pass
# Exit a parse tree produced by PlSqlParser#xmlschema_spec.
def exitXmlschema_spec(self, ctx:PlSqlParser.Xmlschema_specContext):
pass
# Enter a parse tree produced by PlSqlParser#object_table.
def enterObject_table(self, ctx:PlSqlParser.Object_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#object_table.
def exitObject_table(self, ctx:PlSqlParser.Object_tableContext):
pass
# Enter a parse tree produced by PlSqlParser#oid_index_clause.
def enterOid_index_clause(self, ctx:PlSqlParser.Oid_index_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#oid_index_clause.
def exitOid_index_clause(self, ctx:PlSqlParser.Oid_index_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#oid_clause.
def enterOid_clause(self, ctx:PlSqlParser.Oid_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#oid_clause.
def exitOid_clause(self, ctx:PlSqlParser.Oid_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#object_properties.
def enterObject_properties(self, ctx:PlSqlParser.Object_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#object_properties.
def exitObject_properties(self, ctx:PlSqlParser.Object_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#object_table_substitution.
def enterObject_table_substitution(self, ctx:PlSqlParser.Object_table_substitutionContext):
pass
# Exit a parse tree produced by PlSqlParser#object_table_substitution.
def exitObject_table_substitution(self, ctx:PlSqlParser.Object_table_substitutionContext):
pass
# Enter a parse tree produced by PlSqlParser#relational_table.
def enterRelational_table(self, ctx:PlSqlParser.Relational_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#relational_table.
def exitRelational_table(self, ctx:PlSqlParser.Relational_tableContext):
pass
# Enter a parse tree produced by PlSqlParser#relational_properties.
def enterRelational_properties(self, ctx:PlSqlParser.Relational_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#relational_properties.
def exitRelational_properties(self, ctx:PlSqlParser.Relational_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#table_partitioning_clauses.
def enterTable_partitioning_clauses(self, ctx:PlSqlParser.Table_partitioning_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#table_partitioning_clauses.
def exitTable_partitioning_clauses(self, ctx:PlSqlParser.Table_partitioning_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#table_range_partition_by_clause.
def enterTable_range_partition_by_clause(self, ctx:PlSqlParser.Table_range_partition_by_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#table_range_partition_by_clause.
def exitTable_range_partition_by_clause(self, ctx:PlSqlParser.Table_range_partition_by_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#datatype_null_enable.
def enterDatatype_null_enable(self, ctx:PlSqlParser.Datatype_null_enableContext):
pass
# Exit a parse tree produced by PlSqlParser#datatype_null_enable.
def exitDatatype_null_enable(self, ctx:PlSqlParser.Datatype_null_enableContext):
pass
# Enter a parse tree produced by PlSqlParser#size_clause.
def enterSize_clause(self, ctx:PlSqlParser.Size_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#size_clause.
def exitSize_clause(self, ctx:PlSqlParser.Size_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#table_compression.
def enterTable_compression(self, ctx:PlSqlParser.Table_compressionContext):
pass
# Exit a parse tree produced by PlSqlParser#table_compression.
def exitTable_compression(self, ctx:PlSqlParser.Table_compressionContext):
pass
# Enter a parse tree produced by PlSqlParser#physical_attributes_clause.
def enterPhysical_attributes_clause(self, ctx:PlSqlParser.Physical_attributes_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#physical_attributes_clause.
def exitPhysical_attributes_clause(self, ctx:PlSqlParser.Physical_attributes_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#storage_clause.
def enterStorage_clause(self, ctx:PlSqlParser.Storage_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#storage_clause.
def exitStorage_clause(self, ctx:PlSqlParser.Storage_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#deferred_segment_creation.
def enterDeferred_segment_creation(self, ctx:PlSqlParser.Deferred_segment_creationContext):
pass
# Exit a parse tree produced by PlSqlParser#deferred_segment_creation.
def exitDeferred_segment_creation(self, ctx:PlSqlParser.Deferred_segment_creationContext):
pass
# Enter a parse tree produced by PlSqlParser#segment_attributes_clause.
def enterSegment_attributes_clause(self, ctx:PlSqlParser.Segment_attributes_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#segment_attributes_clause.
def exitSegment_attributes_clause(self, ctx:PlSqlParser.Segment_attributes_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#physical_properties.
def enterPhysical_properties(self, ctx:PlSqlParser.Physical_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#physical_properties.
def exitPhysical_properties(self, ctx:PlSqlParser.Physical_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#row_movement_clause.
def enterRow_movement_clause(self, ctx:PlSqlParser.Row_movement_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#row_movement_clause.
def exitRow_movement_clause(self, ctx:PlSqlParser.Row_movement_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#flashback_archive_clause.
def enterFlashback_archive_clause(self, ctx:PlSqlParser.Flashback_archive_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#flashback_archive_clause.
def exitFlashback_archive_clause(self, ctx:PlSqlParser.Flashback_archive_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#log_grp.
def enterLog_grp(self, ctx:PlSqlParser.Log_grpContext):
pass
# Exit a parse tree produced by PlSqlParser#log_grp.
def exitLog_grp(self, ctx:PlSqlParser.Log_grpContext):
pass
# Enter a parse tree produced by PlSqlParser#supplemental_table_logging.
def enterSupplemental_table_logging(self, ctx:PlSqlParser.Supplemental_table_loggingContext):
pass
# Exit a parse tree produced by PlSqlParser#supplemental_table_logging.
def exitSupplemental_table_logging(self, ctx:PlSqlParser.Supplemental_table_loggingContext):
pass
# Enter a parse tree produced by PlSqlParser#supplemental_log_grp_clause.
def enterSupplemental_log_grp_clause(self, ctx:PlSqlParser.Supplemental_log_grp_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#supplemental_log_grp_clause.
def exitSupplemental_log_grp_clause(self, ctx:PlSqlParser.Supplemental_log_grp_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#supplemental_id_key_clause.
def enterSupplemental_id_key_clause(self, ctx:PlSqlParser.Supplemental_id_key_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#supplemental_id_key_clause.
def exitSupplemental_id_key_clause(self, ctx:PlSqlParser.Supplemental_id_key_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#allocate_extent_clause.
def enterAllocate_extent_clause(self, ctx:PlSqlParser.Allocate_extent_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#allocate_extent_clause.
def exitAllocate_extent_clause(self, ctx:PlSqlParser.Allocate_extent_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#deallocate_unused_clause.
def enterDeallocate_unused_clause(self, ctx:PlSqlParser.Deallocate_unused_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#deallocate_unused_clause.
def exitDeallocate_unused_clause(self, ctx:PlSqlParser.Deallocate_unused_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#shrink_clause.
def enterShrink_clause(self, ctx:PlSqlParser.Shrink_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#shrink_clause.
def exitShrink_clause(self, ctx:PlSqlParser.Shrink_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#records_per_block_clause.
def enterRecords_per_block_clause(self, ctx:PlSqlParser.Records_per_block_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#records_per_block_clause.
def exitRecords_per_block_clause(self, ctx:PlSqlParser.Records_per_block_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#upgrade_table_clause.
def enterUpgrade_table_clause(self, ctx:PlSqlParser.Upgrade_table_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#upgrade_table_clause.
def exitUpgrade_table_clause(self, ctx:PlSqlParser.Upgrade_table_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_table.
def enterDrop_table(self, ctx:PlSqlParser.Drop_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_table.
def exitDrop_table(self, ctx:PlSqlParser.Drop_tableContext):
pass
# Enter a parse tree produced by PlSqlParser#comment_on_column.
def enterComment_on_column(self, ctx:PlSqlParser.Comment_on_columnContext):
pass
# Exit a parse tree produced by PlSqlParser#comment_on_column.
def exitComment_on_column(self, ctx:PlSqlParser.Comment_on_columnContext):
pass
# Enter a parse tree produced by PlSqlParser#enable_or_disable.
def enterEnable_or_disable(self, ctx:PlSqlParser.Enable_or_disableContext):
pass
# Exit a parse tree produced by PlSqlParser#enable_or_disable.
def exitEnable_or_disable(self, ctx:PlSqlParser.Enable_or_disableContext):
pass
# Enter a parse tree produced by PlSqlParser#allow_or_disallow.
def enterAllow_or_disallow(self, ctx:PlSqlParser.Allow_or_disallowContext):
pass
# Exit a parse tree produced by PlSqlParser#allow_or_disallow.
def exitAllow_or_disallow(self, ctx:PlSqlParser.Allow_or_disallowContext):
pass
# Enter a parse tree produced by PlSqlParser#create_synonym.
def enterCreate_synonym(self, ctx:PlSqlParser.Create_synonymContext):
pass
# Exit a parse tree produced by PlSqlParser#create_synonym.
def exitCreate_synonym(self, ctx:PlSqlParser.Create_synonymContext):
pass
# Enter a parse tree produced by PlSqlParser#comment_on_table.
def enterComment_on_table(self, ctx:PlSqlParser.Comment_on_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#comment_on_table.
def exitComment_on_table(self, ctx:PlSqlParser.Comment_on_tableContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_table.
def enterAlter_table(self, ctx:PlSqlParser.Alter_tableContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_table.
def exitAlter_table(self, ctx:PlSqlParser.Alter_tableContext):
pass
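# Sketch: enter/exit calls nest with the parse tree, so an ALTER TABLE
# subclass can set state on enter and have the clause-level callbacks
# below observe it before exitAlter_table fires.
#
#   class AlterTracker(PlSqlParserListener):
#       def enterAlter_table(self, ctx:PlSqlParser.Alter_tableContext):
#           self.in_alter = True     # runs before any child clause hook
#       def exitAlter_table(self, ctx:PlSqlParser.Alter_tableContext):
#           self.in_alter = False    # runs after all child clause hooks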
# Enter a parse tree produced by PlSqlParser#alter_table_properties.
def enterAlter_table_properties(self, ctx:PlSqlParser.Alter_table_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_table_properties.
def exitAlter_table_properties(self, ctx:PlSqlParser.Alter_table_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_table_properties_1.
def enterAlter_table_properties_1(self, ctx:PlSqlParser.Alter_table_properties_1Context):
pass
# Exit a parse tree produced by PlSqlParser#alter_table_properties_1.
def exitAlter_table_properties_1(self, ctx:PlSqlParser.Alter_table_properties_1Context):
pass
# Enter a parse tree produced by PlSqlParser#alter_iot_clauses.
def enterAlter_iot_clauses(self, ctx:PlSqlParser.Alter_iot_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_iot_clauses.
def exitAlter_iot_clauses(self, ctx:PlSqlParser.Alter_iot_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_mapping_table_clause.
def enterAlter_mapping_table_clause(self, ctx:PlSqlParser.Alter_mapping_table_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_mapping_table_clause.
def exitAlter_mapping_table_clause(self, ctx:PlSqlParser.Alter_mapping_table_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_overflow_clause.
def enterAlter_overflow_clause(self, ctx:PlSqlParser.Alter_overflow_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_overflow_clause.
def exitAlter_overflow_clause(self, ctx:PlSqlParser.Alter_overflow_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#add_overflow_clause.
def enterAdd_overflow_clause(self, ctx:PlSqlParser.Add_overflow_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#add_overflow_clause.
def exitAdd_overflow_clause(self, ctx:PlSqlParser.Add_overflow_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#enable_disable_clause.
def enterEnable_disable_clause(self, ctx:PlSqlParser.Enable_disable_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#enable_disable_clause.
def exitEnable_disable_clause(self, ctx:PlSqlParser.Enable_disable_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#using_index_clause.
def enterUsing_index_clause(self, ctx:PlSqlParser.Using_index_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#using_index_clause.
def exitUsing_index_clause(self, ctx:PlSqlParser.Using_index_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#exceptions_clause.
def enterExceptions_clause(self, ctx:PlSqlParser.Exceptions_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#exceptions_clause.
def exitExceptions_clause(self, ctx:PlSqlParser.Exceptions_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#move_table_clause.
def enterMove_table_clause(self, ctx:PlSqlParser.Move_table_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#move_table_clause.
def exitMove_table_clause(self, ctx:PlSqlParser.Move_table_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#index_org_table_clause.
def enterIndex_org_table_clause(self, ctx:PlSqlParser.Index_org_table_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#index_org_table_clause.
def exitIndex_org_table_clause(self, ctx:PlSqlParser.Index_org_table_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#mapping_table_clause.
def enterMapping_table_clause(self, ctx:PlSqlParser.Mapping_table_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#mapping_table_clause.
def exitMapping_table_clause(self, ctx:PlSqlParser.Mapping_table_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#key_compression.
def enterKey_compression(self, ctx:PlSqlParser.Key_compressionContext):
pass
# Exit a parse tree produced by PlSqlParser#key_compression.
def exitKey_compression(self, ctx:PlSqlParser.Key_compressionContext):
pass
# Enter a parse tree produced by PlSqlParser#index_org_overflow_clause.
def enterIndex_org_overflow_clause(self, ctx:PlSqlParser.Index_org_overflow_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#index_org_overflow_clause.
def exitIndex_org_overflow_clause(self, ctx:PlSqlParser.Index_org_overflow_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#column_clauses.
def enterColumn_clauses(self, ctx:PlSqlParser.Column_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#column_clauses.
def exitColumn_clauses(self, ctx:PlSqlParser.Column_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#modify_collection_retrieval.
def enterModify_collection_retrieval(self, ctx:PlSqlParser.Modify_collection_retrievalContext):
pass
# Exit a parse tree produced by PlSqlParser#modify_collection_retrieval.
def exitModify_collection_retrieval(self, ctx:PlSqlParser.Modify_collection_retrievalContext):
pass
# Enter a parse tree produced by PlSqlParser#collection_item.
def enterCollection_item(self, ctx:PlSqlParser.Collection_itemContext):
pass
# Exit a parse tree produced by PlSqlParser#collection_item.
def exitCollection_item(self, ctx:PlSqlParser.Collection_itemContext):
pass
# Enter a parse tree produced by PlSqlParser#rename_column_clause.
def enterRename_column_clause(self, ctx:PlSqlParser.Rename_column_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#rename_column_clause.
def exitRename_column_clause(self, ctx:PlSqlParser.Rename_column_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#old_column_name.
def enterOld_column_name(self, ctx:PlSqlParser.Old_column_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#old_column_name.
def exitOld_column_name(self, ctx:PlSqlParser.Old_column_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#new_column_name.
def enterNew_column_name(self, ctx:PlSqlParser.New_column_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#new_column_name.
def exitNew_column_name(self, ctx:PlSqlParser.New_column_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#add_modify_drop_column_clauses.
def enterAdd_modify_drop_column_clauses(self, ctx:PlSqlParser.Add_modify_drop_column_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#add_modify_drop_column_clauses.
def exitAdd_modify_drop_column_clauses(self, ctx:PlSqlParser.Add_modify_drop_column_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_column_clause.
def enterDrop_column_clause(self, ctx:PlSqlParser.Drop_column_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_column_clause.
def exitDrop_column_clause(self, ctx:PlSqlParser.Drop_column_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#modify_column_clauses.
def enterModify_column_clauses(self, ctx:PlSqlParser.Modify_column_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#modify_column_clauses.
def exitModify_column_clauses(self, ctx:PlSqlParser.Modify_column_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#modify_col_properties.
def enterModify_col_properties(self, ctx:PlSqlParser.Modify_col_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#modify_col_properties.
def exitModify_col_properties(self, ctx:PlSqlParser.Modify_col_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#modify_col_substitutable.
def enterModify_col_substitutable(self, ctx:PlSqlParser.Modify_col_substitutableContext):
pass
# Exit a parse tree produced by PlSqlParser#modify_col_substitutable.
def exitModify_col_substitutable(self, ctx:PlSqlParser.Modify_col_substitutableContext):
pass
# Enter a parse tree produced by PlSqlParser#add_column_clause.
def enterAdd_column_clause(self, ctx:PlSqlParser.Add_column_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#add_column_clause.
def exitAdd_column_clause(self, ctx:PlSqlParser.Add_column_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#alter_varray_col_properties.
def enterAlter_varray_col_properties(self, ctx:PlSqlParser.Alter_varray_col_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#alter_varray_col_properties.
def exitAlter_varray_col_properties(self, ctx:PlSqlParser.Alter_varray_col_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#varray_col_properties.
def enterVarray_col_properties(self, ctx:PlSqlParser.Varray_col_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#varray_col_properties.
def exitVarray_col_properties(self, ctx:PlSqlParser.Varray_col_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#varray_storage_clause.
def enterVarray_storage_clause(self, ctx:PlSqlParser.Varray_storage_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#varray_storage_clause.
def exitVarray_storage_clause(self, ctx:PlSqlParser.Varray_storage_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_segname.
def enterLob_segname(self, ctx:PlSqlParser.Lob_segnameContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_segname.
def exitLob_segname(self, ctx:PlSqlParser.Lob_segnameContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_item.
def enterLob_item(self, ctx:PlSqlParser.Lob_itemContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_item.
def exitLob_item(self, ctx:PlSqlParser.Lob_itemContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_storage_parameters.
def enterLob_storage_parameters(self, ctx:PlSqlParser.Lob_storage_parametersContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_storage_parameters.
def exitLob_storage_parameters(self, ctx:PlSqlParser.Lob_storage_parametersContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_storage_clause.
def enterLob_storage_clause(self, ctx:PlSqlParser.Lob_storage_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_storage_clause.
def exitLob_storage_clause(self, ctx:PlSqlParser.Lob_storage_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#modify_lob_storage_clause.
def enterModify_lob_storage_clause(self, ctx:PlSqlParser.Modify_lob_storage_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#modify_lob_storage_clause.
def exitModify_lob_storage_clause(self, ctx:PlSqlParser.Modify_lob_storage_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#modify_lob_parameters.
def enterModify_lob_parameters(self, ctx:PlSqlParser.Modify_lob_parametersContext):
pass
# Exit a parse tree produced by PlSqlParser#modify_lob_parameters.
def exitModify_lob_parameters(self, ctx:PlSqlParser.Modify_lob_parametersContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_parameters.
def enterLob_parameters(self, ctx:PlSqlParser.Lob_parametersContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_parameters.
def exitLob_parameters(self, ctx:PlSqlParser.Lob_parametersContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_deduplicate_clause.
def enterLob_deduplicate_clause(self, ctx:PlSqlParser.Lob_deduplicate_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_deduplicate_clause.
def exitLob_deduplicate_clause(self, ctx:PlSqlParser.Lob_deduplicate_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_compression_clause.
def enterLob_compression_clause(self, ctx:PlSqlParser.Lob_compression_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_compression_clause.
def exitLob_compression_clause(self, ctx:PlSqlParser.Lob_compression_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#lob_retention_clause.
def enterLob_retention_clause(self, ctx:PlSqlParser.Lob_retention_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#lob_retention_clause.
def exitLob_retention_clause(self, ctx:PlSqlParser.Lob_retention_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#encryption_spec.
def enterEncryption_spec(self, ctx:PlSqlParser.Encryption_specContext):
pass
# Exit a parse tree produced by PlSqlParser#encryption_spec.
def exitEncryption_spec(self, ctx:PlSqlParser.Encryption_specContext):
pass
# Enter a parse tree produced by PlSqlParser#tablespace.
def enterTablespace(self, ctx:PlSqlParser.TablespaceContext):
pass
# Exit a parse tree produced by PlSqlParser#tablespace.
def exitTablespace(self, ctx:PlSqlParser.TablespaceContext):
pass
# Enter a parse tree produced by PlSqlParser#varray_item.
def enterVarray_item(self, ctx:PlSqlParser.Varray_itemContext):
pass
# Exit a parse tree produced by PlSqlParser#varray_item.
def exitVarray_item(self, ctx:PlSqlParser.Varray_itemContext):
pass
# Enter a parse tree produced by PlSqlParser#column_properties.
def enterColumn_properties(self, ctx:PlSqlParser.Column_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#column_properties.
def exitColumn_properties(self, ctx:PlSqlParser.Column_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#column_definition.
def enterColumn_definition(self, ctx:PlSqlParser.Column_definitionContext):
pass
# Exit a parse tree produced by PlSqlParser#column_definition.
def exitColumn_definition(self, ctx:PlSqlParser.Column_definitionContext):
pass
# Enter a parse tree produced by PlSqlParser#virtual_column_definition.
def enterVirtual_column_definition(self, ctx:PlSqlParser.Virtual_column_definitionContext):
pass
# Exit a parse tree produced by PlSqlParser#virtual_column_definition.
def exitVirtual_column_definition(self, ctx:PlSqlParser.Virtual_column_definitionContext):
pass
# Enter a parse tree produced by PlSqlParser#out_of_line_part_storage.
def enterOut_of_line_part_storage(self, ctx:PlSqlParser.Out_of_line_part_storageContext):
pass
# Exit a parse tree produced by PlSqlParser#out_of_line_part_storage.
def exitOut_of_line_part_storage(self, ctx:PlSqlParser.Out_of_line_part_storageContext):
pass
# Enter a parse tree produced by PlSqlParser#nested_table_col_properties.
def enterNested_table_col_properties(self, ctx:PlSqlParser.Nested_table_col_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#nested_table_col_properties.
def exitNested_table_col_properties(self, ctx:PlSqlParser.Nested_table_col_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#nested_item.
def enterNested_item(self, ctx:PlSqlParser.Nested_itemContext):
pass
# Exit a parse tree produced by PlSqlParser#nested_item.
def exitNested_item(self, ctx:PlSqlParser.Nested_itemContext):
pass
# Enter a parse tree produced by PlSqlParser#substitutable_column_clause.
def enterSubstitutable_column_clause(self, ctx:PlSqlParser.Substitutable_column_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#substitutable_column_clause.
def exitSubstitutable_column_clause(self, ctx:PlSqlParser.Substitutable_column_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#partition_name.
def enterPartition_name(self, ctx:PlSqlParser.Partition_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#partition_name.
def exitPartition_name(self, ctx:PlSqlParser.Partition_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#supplemental_logging_props.
def enterSupplemental_logging_props(self, ctx:PlSqlParser.Supplemental_logging_propsContext):
pass
# Exit a parse tree produced by PlSqlParser#supplemental_logging_props.
def exitSupplemental_logging_props(self, ctx:PlSqlParser.Supplemental_logging_propsContext):
pass
# Enter a parse tree produced by PlSqlParser#column_or_attribute.
def enterColumn_or_attribute(self, ctx:PlSqlParser.Column_or_attributeContext):
pass
# Exit a parse tree produced by PlSqlParser#column_or_attribute.
def exitColumn_or_attribute(self, ctx:PlSqlParser.Column_or_attributeContext):
pass
# Enter a parse tree produced by PlSqlParser#object_type_col_properties.
def enterObject_type_col_properties(self, ctx:PlSqlParser.Object_type_col_propertiesContext):
pass
# Exit a parse tree produced by PlSqlParser#object_type_col_properties.
def exitObject_type_col_properties(self, ctx:PlSqlParser.Object_type_col_propertiesContext):
pass
# Enter a parse tree produced by PlSqlParser#constraint_clauses.
def enterConstraint_clauses(self, ctx:PlSqlParser.Constraint_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#constraint_clauses.
def exitConstraint_clauses(self, ctx:PlSqlParser.Constraint_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#old_constraint_name.
def enterOld_constraint_name(self, ctx:PlSqlParser.Old_constraint_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#old_constraint_name.
def exitOld_constraint_name(self, ctx:PlSqlParser.Old_constraint_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#new_constraint_name.
def enterNew_constraint_name(self, ctx:PlSqlParser.New_constraint_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#new_constraint_name.
def exitNew_constraint_name(self, ctx:PlSqlParser.New_constraint_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_constraint_clause.
def enterDrop_constraint_clause(self, ctx:PlSqlParser.Drop_constraint_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_constraint_clause.
def exitDrop_constraint_clause(self, ctx:PlSqlParser.Drop_constraint_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_primary_key_or_unique_or_generic_clause.
def enterDrop_primary_key_or_unique_or_generic_clause(self, ctx:PlSqlParser.Drop_primary_key_or_unique_or_generic_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_primary_key_or_unique_or_generic_clause.
def exitDrop_primary_key_or_unique_or_generic_clause(self, ctx:PlSqlParser.Drop_primary_key_or_unique_or_generic_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#add_constraint.
def enterAdd_constraint(self, ctx:PlSqlParser.Add_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#add_constraint.
def exitAdd_constraint(self, ctx:PlSqlParser.Add_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#check_constraint.
def enterCheck_constraint(self, ctx:PlSqlParser.Check_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#check_constraint.
def exitCheck_constraint(self, ctx:PlSqlParser.Check_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#drop_constraint.
def enterDrop_constraint(self, ctx:PlSqlParser.Drop_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#drop_constraint.
def exitDrop_constraint(self, ctx:PlSqlParser.Drop_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#enable_constraint.
def enterEnable_constraint(self, ctx:PlSqlParser.Enable_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#enable_constraint.
def exitEnable_constraint(self, ctx:PlSqlParser.Enable_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#disable_constraint.
def enterDisable_constraint(self, ctx:PlSqlParser.Disable_constraintContext):
pass
# Exit a parse tree produced by PlSqlParser#disable_constraint.
def exitDisable_constraint(self, ctx:PlSqlParser.Disable_constraintContext):
pass
# Enter a parse tree produced by PlSqlParser#foreign_key_clause.
def enterForeign_key_clause(self, ctx:PlSqlParser.Foreign_key_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#foreign_key_clause.
def exitForeign_key_clause(self, ctx:PlSqlParser.Foreign_key_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#references_clause.
def enterReferences_clause(self, ctx:PlSqlParser.References_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#references_clause.
def exitReferences_clause(self, ctx:PlSqlParser.References_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#on_delete_clause.
def enterOn_delete_clause(self, ctx:PlSqlParser.On_delete_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#on_delete_clause.
def exitOn_delete_clause(self, ctx:PlSqlParser.On_delete_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#unique_key_clause.
def enterUnique_key_clause(self, ctx:PlSqlParser.Unique_key_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#unique_key_clause.
def exitUnique_key_clause(self, ctx:PlSqlParser.Unique_key_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#primary_key_clause.
def enterPrimary_key_clause(self, ctx:PlSqlParser.Primary_key_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#primary_key_clause.
def exitPrimary_key_clause(self, ctx:PlSqlParser.Primary_key_clauseContext):
pass
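# Sketch: constraint contexts carry their source span via the start and
# stop tokens (the tree is fully built before the walk, so both are
# available even in an enter hook), which is handy for diagnostics.
#
#   class ConstraintReporter(PlSqlParserListener):
#       def enterOut_of_line_constraint(self, ctx:PlSqlParser.Out_of_line_constraintContext):
#           print("constraint spans lines",
#                 ctx.start.line, "to", ctx.stop.line)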
# Enter a parse tree produced by PlSqlParser#anonymous_block.
def enterAnonymous_block(self, ctx:PlSqlParser.Anonymous_blockContext):
pass
# Exit a parse tree produced by PlSqlParser#anonymous_block.
def exitAnonymous_block(self, ctx:PlSqlParser.Anonymous_blockContext):
pass
# Enter a parse tree produced by PlSqlParser#invoker_rights_clause.
def enterInvoker_rights_clause(self, ctx:PlSqlParser.Invoker_rights_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#invoker_rights_clause.
def exitInvoker_rights_clause(self, ctx:PlSqlParser.Invoker_rights_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#compiler_parameters_clause.
def enterCompiler_parameters_clause(self, ctx:PlSqlParser.Compiler_parameters_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#compiler_parameters_clause.
def exitCompiler_parameters_clause(self, ctx:PlSqlParser.Compiler_parameters_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#call_spec.
def enterCall_spec(self, ctx:PlSqlParser.Call_specContext):
pass
# Exit a parse tree produced by PlSqlParser#call_spec.
def exitCall_spec(self, ctx:PlSqlParser.Call_specContext):
pass
# Enter a parse tree produced by PlSqlParser#java_spec.
def enterJava_spec(self, ctx:PlSqlParser.Java_specContext):
pass
# Exit a parse tree produced by PlSqlParser#java_spec.
def exitJava_spec(self, ctx:PlSqlParser.Java_specContext):
pass
# Enter a parse tree produced by PlSqlParser#c_spec.
def enterC_spec(self, ctx:PlSqlParser.C_specContext):
pass
# Exit a parse tree produced by PlSqlParser#c_spec.
def exitC_spec(self, ctx:PlSqlParser.C_specContext):
pass
# Enter a parse tree produced by PlSqlParser#c_agent_in_clause.
def enterC_agent_in_clause(self, ctx:PlSqlParser.C_agent_in_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#c_agent_in_clause.
def exitC_agent_in_clause(self, ctx:PlSqlParser.C_agent_in_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#c_parameters_clause.
def enterC_parameters_clause(self, ctx:PlSqlParser.C_parameters_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#c_parameters_clause.
def exitC_parameters_clause(self, ctx:PlSqlParser.C_parameters_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#parameter.
def enterParameter(self, ctx:PlSqlParser.ParameterContext):
pass
# Exit a parse tree produced by PlSqlParser#parameter.
def exitParameter(self, ctx:PlSqlParser.ParameterContext):
pass
# Enter a parse tree produced by PlSqlParser#default_value_part.
def enterDefault_value_part(self, ctx:PlSqlParser.Default_value_partContext):
pass
# Exit a parse tree produced by PlSqlParser#default_value_part.
def exitDefault_value_part(self, ctx:PlSqlParser.Default_value_partContext):
pass
# Enter a parse tree produced by PlSqlParser#seq_of_declare_specs.
def enterSeq_of_declare_specs(self, ctx:PlSqlParser.Seq_of_declare_specsContext):
pass
# Exit a parse tree produced by PlSqlParser#seq_of_declare_specs.
def exitSeq_of_declare_specs(self, ctx:PlSqlParser.Seq_of_declare_specsContext):
pass
# Enter a parse tree produced by PlSqlParser#declare_spec.
def enterDeclare_spec(self, ctx:PlSqlParser.Declare_specContext):
pass
# Exit a parse tree produced by PlSqlParser#declare_spec.
def exitDeclare_spec(self, ctx:PlSqlParser.Declare_specContext):
pass
# Enter a parse tree produced by PlSqlParser#variable_declaration.
def enterVariable_declaration(self, ctx:PlSqlParser.Variable_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#variable_declaration.
def exitVariable_declaration(self, ctx:PlSqlParser.Variable_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#subtype_declaration.
def enterSubtype_declaration(self, ctx:PlSqlParser.Subtype_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#subtype_declaration.
def exitSubtype_declaration(self, ctx:PlSqlParser.Subtype_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#cursor_declaration.
def enterCursor_declaration(self, ctx:PlSqlParser.Cursor_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#cursor_declaration.
def exitCursor_declaration(self, ctx:PlSqlParser.Cursor_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#parameter_spec.
def enterParameter_spec(self, ctx:PlSqlParser.Parameter_specContext):
pass
# Exit a parse tree produced by PlSqlParser#parameter_spec.
def exitParameter_spec(self, ctx:PlSqlParser.Parameter_specContext):
pass
# Enter a parse tree produced by PlSqlParser#exception_declaration.
def enterException_declaration(self, ctx:PlSqlParser.Exception_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#exception_declaration.
def exitException_declaration(self, ctx:PlSqlParser.Exception_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#pragma_declaration.
def enterPragma_declaration(self, ctx:PlSqlParser.Pragma_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#pragma_declaration.
def exitPragma_declaration(self, ctx:PlSqlParser.Pragma_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#record_type_def.
def enterRecord_type_def(self, ctx:PlSqlParser.Record_type_defContext):
pass
# Exit a parse tree produced by PlSqlParser#record_type_def.
def exitRecord_type_def(self, ctx:PlSqlParser.Record_type_defContext):
pass
# Enter a parse tree produced by PlSqlParser#field_spec.
def enterField_spec(self, ctx:PlSqlParser.Field_specContext):
pass
# Exit a parse tree produced by PlSqlParser#field_spec.
def exitField_spec(self, ctx:PlSqlParser.Field_specContext):
pass
# Enter a parse tree produced by PlSqlParser#ref_cursor_type_def.
def enterRef_cursor_type_def(self, ctx:PlSqlParser.Ref_cursor_type_defContext):
pass
# Exit a parse tree produced by PlSqlParser#ref_cursor_type_def.
def exitRef_cursor_type_def(self, ctx:PlSqlParser.Ref_cursor_type_defContext):
pass
# Enter a parse tree produced by PlSqlParser#type_declaration.
def enterType_declaration(self, ctx:PlSqlParser.Type_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#type_declaration.
def exitType_declaration(self, ctx:PlSqlParser.Type_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#table_type_def.
def enterTable_type_def(self, ctx:PlSqlParser.Table_type_defContext):
pass
# Exit a parse tree produced by PlSqlParser#table_type_def.
def exitTable_type_def(self, ctx:PlSqlParser.Table_type_defContext):
pass
# Enter a parse tree produced by PlSqlParser#table_indexed_by_part.
def enterTable_indexed_by_part(self, ctx:PlSqlParser.Table_indexed_by_partContext):
pass
# Exit a parse tree produced by PlSqlParser#table_indexed_by_part.
def exitTable_indexed_by_part(self, ctx:PlSqlParser.Table_indexed_by_partContext):
pass
# Enter a parse tree produced by PlSqlParser#varray_type_def.
def enterVarray_type_def(self, ctx:PlSqlParser.Varray_type_defContext):
pass
# Exit a parse tree produced by PlSqlParser#varray_type_def.
def exitVarray_type_def(self, ctx:PlSqlParser.Varray_type_defContext):
pass
# Enter a parse tree produced by PlSqlParser#seq_of_statements.
def enterSeq_of_statements(self, ctx:PlSqlParser.Seq_of_statementsContext):
pass
# Exit a parse tree produced by PlSqlParser#seq_of_statements.
def exitSeq_of_statements(self, ctx:PlSqlParser.Seq_of_statementsContext):
pass
# Enter a parse tree produced by PlSqlParser#label_declaration.
def enterLabel_declaration(self, ctx:PlSqlParser.Label_declarationContext):
pass
# Exit a parse tree produced by PlSqlParser#label_declaration.
def exitLabel_declaration(self, ctx:PlSqlParser.Label_declarationContext):
pass
# Enter a parse tree produced by PlSqlParser#statement.
def enterStatement(self, ctx:PlSqlParser.StatementContext):
pass
# Exit a parse tree produced by PlSqlParser#statement.
def exitStatement(self, ctx:PlSqlParser.StatementContext):
pass
# Enter a parse tree produced by PlSqlParser#swallow_to_semi.
def enterSwallow_to_semi(self, ctx:PlSqlParser.Swallow_to_semiContext):
pass
# Exit a parse tree produced by PlSqlParser#swallow_to_semi.
def exitSwallow_to_semi(self, ctx:PlSqlParser.Swallow_to_semiContext):
pass
# Enter a parse tree produced by PlSqlParser#assignment_statement.
def enterAssignment_statement(self, ctx:PlSqlParser.Assignment_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#assignment_statement.
def exitAssignment_statement(self, ctx:PlSqlParser.Assignment_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#continue_statement.
def enterContinue_statement(self, ctx:PlSqlParser.Continue_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#continue_statement.
def exitContinue_statement(self, ctx:PlSqlParser.Continue_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#exit_statement.
def enterExit_statement(self, ctx:PlSqlParser.Exit_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#exit_statement.
def exitExit_statement(self, ctx:PlSqlParser.Exit_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#goto_statement.
def enterGoto_statement(self, ctx:PlSqlParser.Goto_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#goto_statement.
def exitGoto_statement(self, ctx:PlSqlParser.Goto_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#if_statement.
def enterIf_statement(self, ctx:PlSqlParser.If_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#if_statement.
def exitIf_statement(self, ctx:PlSqlParser.If_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#elsif_part.
def enterElsif_part(self, ctx:PlSqlParser.Elsif_partContext):
pass
# Exit a parse tree produced by PlSqlParser#elsif_part.
def exitElsif_part(self, ctx:PlSqlParser.Elsif_partContext):
pass
# Enter a parse tree produced by PlSqlParser#else_part.
def enterElse_part(self, ctx:PlSqlParser.Else_partContext):
pass
# Exit a parse tree produced by PlSqlParser#else_part.
def exitElse_part(self, ctx:PlSqlParser.Else_partContext):
pass
# Enter a parse tree produced by PlSqlParser#loop_statement.
def enterLoop_statement(self, ctx:PlSqlParser.Loop_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#loop_statement.
def exitLoop_statement(self, ctx:PlSqlParser.Loop_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#cursor_loop_param.
def enterCursor_loop_param(self, ctx:PlSqlParser.Cursor_loop_paramContext):
pass
# Exit a parse tree produced by PlSqlParser#cursor_loop_param.
def exitCursor_loop_param(self, ctx:PlSqlParser.Cursor_loop_paramContext):
pass
# Enter a parse tree produced by PlSqlParser#forall_statement.
def enterForall_statement(self, ctx:PlSqlParser.Forall_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#forall_statement.
def exitForall_statement(self, ctx:PlSqlParser.Forall_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#bounds_clause.
def enterBounds_clause(self, ctx:PlSqlParser.Bounds_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#bounds_clause.
def exitBounds_clause(self, ctx:PlSqlParser.Bounds_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#between_bound.
def enterBetween_bound(self, ctx:PlSqlParser.Between_boundContext):
pass
# Exit a parse tree produced by PlSqlParser#between_bound.
def exitBetween_bound(self, ctx:PlSqlParser.Between_boundContext):
pass
# Enter a parse tree produced by PlSqlParser#lower_bound.
def enterLower_bound(self, ctx:PlSqlParser.Lower_boundContext):
pass
# Exit a parse tree produced by PlSqlParser#lower_bound.
def exitLower_bound(self, ctx:PlSqlParser.Lower_boundContext):
pass
# Enter a parse tree produced by PlSqlParser#upper_bound.
def enterUpper_bound(self, ctx:PlSqlParser.Upper_boundContext):
pass
# Exit a parse tree produced by PlSqlParser#upper_bound.
def exitUpper_bound(self, ctx:PlSqlParser.Upper_boundContext):
pass
# Enter a parse tree produced by PlSqlParser#null_statement.
def enterNull_statement(self, ctx:PlSqlParser.Null_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#null_statement.
def exitNull_statement(self, ctx:PlSqlParser.Null_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#raise_statement.
def enterRaise_statement(self, ctx:PlSqlParser.Raise_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#raise_statement.
def exitRaise_statement(self, ctx:PlSqlParser.Raise_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#return_statement.
def enterReturn_statement(self, ctx:PlSqlParser.Return_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#return_statement.
def exitReturn_statement(self, ctx:PlSqlParser.Return_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#function_call.
def enterFunction_call(self, ctx:PlSqlParser.Function_callContext):
pass
# Exit a parse tree produced by PlSqlParser#function_call.
def exitFunction_call(self, ctx:PlSqlParser.Function_callContext):
pass
# Enter a parse tree produced by PlSqlParser#pipe_row_statement.
def enterPipe_row_statement(self, ctx:PlSqlParser.Pipe_row_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#pipe_row_statement.
def exitPipe_row_statement(self, ctx:PlSqlParser.Pipe_row_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#body.
def enterBody(self, ctx:PlSqlParser.BodyContext):
pass
# Exit a parse tree produced by PlSqlParser#body.
def exitBody(self, ctx:PlSqlParser.BodyContext):
pass
# Enter a parse tree produced by PlSqlParser#exception_handler.
def enterException_handler(self, ctx:PlSqlParser.Exception_handlerContext):
pass
# Exit a parse tree produced by PlSqlParser#exception_handler.
def exitException_handler(self, ctx:PlSqlParser.Exception_handlerContext):
pass
# Enter a parse tree produced by PlSqlParser#trigger_block.
def enterTrigger_block(self, ctx:PlSqlParser.Trigger_blockContext):
pass
# Exit a parse tree produced by PlSqlParser#trigger_block.
def exitTrigger_block(self, ctx:PlSqlParser.Trigger_blockContext):
pass
# Enter a parse tree produced by PlSqlParser#block.
def enterBlock(self, ctx:PlSqlParser.BlockContext):
pass
# Exit a parse tree produced by PlSqlParser#block.
def exitBlock(self, ctx:PlSqlParser.BlockContext):
pass
# Enter a parse tree produced by PlSqlParser#sql_statement.
def enterSql_statement(self, ctx:PlSqlParser.Sql_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#sql_statement.
def exitSql_statement(self, ctx:PlSqlParser.Sql_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#execute_immediate.
def enterExecute_immediate(self, ctx:PlSqlParser.Execute_immediateContext):
pass
# Exit a parse tree produced by PlSqlParser#execute_immediate.
def exitExecute_immediate(self, ctx:PlSqlParser.Execute_immediateContext):
pass
# Enter a parse tree produced by PlSqlParser#dynamic_returning_clause.
def enterDynamic_returning_clause(self, ctx:PlSqlParser.Dynamic_returning_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#dynamic_returning_clause.
def exitDynamic_returning_clause(self, ctx:PlSqlParser.Dynamic_returning_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#data_manipulation_language_statements.
def enterData_manipulation_language_statements(self, ctx:PlSqlParser.Data_manipulation_language_statementsContext):
pass
# Exit a parse tree produced by PlSqlParser#data_manipulation_language_statements.
def exitData_manipulation_language_statements(self, ctx:PlSqlParser.Data_manipulation_language_statementsContext):
pass
# Enter a parse tree produced by PlSqlParser#cursor_manipulation_statements.
def enterCursor_manipulation_statements(self, ctx:PlSqlParser.Cursor_manipulation_statementsContext):
pass
# Exit a parse tree produced by PlSqlParser#cursor_manipulation_statements.
def exitCursor_manipulation_statements(self, ctx:PlSqlParser.Cursor_manipulation_statementsContext):
pass
# Enter a parse tree produced by PlSqlParser#close_statement.
def enterClose_statement(self, ctx:PlSqlParser.Close_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#close_statement.
def exitClose_statement(self, ctx:PlSqlParser.Close_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#open_statement.
def enterOpen_statement(self, ctx:PlSqlParser.Open_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#open_statement.
def exitOpen_statement(self, ctx:PlSqlParser.Open_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#fetch_statement.
def enterFetch_statement(self, ctx:PlSqlParser.Fetch_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#fetch_statement.
def exitFetch_statement(self, ctx:PlSqlParser.Fetch_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#open_for_statement.
def enterOpen_for_statement(self, ctx:PlSqlParser.Open_for_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#open_for_statement.
def exitOpen_for_statement(self, ctx:PlSqlParser.Open_for_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#transaction_control_statements.
def enterTransaction_control_statements(self, ctx:PlSqlParser.Transaction_control_statementsContext):
pass
# Exit a parse tree produced by PlSqlParser#transaction_control_statements.
def exitTransaction_control_statements(self, ctx:PlSqlParser.Transaction_control_statementsContext):
pass
# Enter a parse tree produced by PlSqlParser#set_transaction_command.
def enterSet_transaction_command(self, ctx:PlSqlParser.Set_transaction_commandContext):
pass
# Exit a parse tree produced by PlSqlParser#set_transaction_command.
def exitSet_transaction_command(self, ctx:PlSqlParser.Set_transaction_commandContext):
pass
# Enter a parse tree produced by PlSqlParser#set_constraint_command.
def enterSet_constraint_command(self, ctx:PlSqlParser.Set_constraint_commandContext):
pass
# Exit a parse tree produced by PlSqlParser#set_constraint_command.
def exitSet_constraint_command(self, ctx:PlSqlParser.Set_constraint_commandContext):
pass
# Enter a parse tree produced by PlSqlParser#commit_statement.
def enterCommit_statement(self, ctx:PlSqlParser.Commit_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#commit_statement.
def exitCommit_statement(self, ctx:PlSqlParser.Commit_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#write_clause.
def enterWrite_clause(self, ctx:PlSqlParser.Write_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#write_clause.
def exitWrite_clause(self, ctx:PlSqlParser.Write_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#rollback_statement.
def enterRollback_statement(self, ctx:PlSqlParser.Rollback_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#rollback_statement.
def exitRollback_statement(self, ctx:PlSqlParser.Rollback_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#savepoint_statement.
def enterSavepoint_statement(self, ctx:PlSqlParser.Savepoint_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#savepoint_statement.
def exitSavepoint_statement(self, ctx:PlSqlParser.Savepoint_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#explain_statement.
def enterExplain_statement(self, ctx:PlSqlParser.Explain_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#explain_statement.
def exitExplain_statement(self, ctx:PlSqlParser.Explain_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#select_statement.
def enterSelect_statement(self, ctx:PlSqlParser.Select_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#select_statement.
def exitSelect_statement(self, ctx:PlSqlParser.Select_statementContext):
pass
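    # --- Usage sketch (illustrative, not emitted by ANTLR) ----------------------
    # The enter/exit stubs in this listener are meant to be overridden in a
    # subclass and driven by the ANTLR runtime's ParseTreeWalker. A minimal,
    # hedged example follows; the module names PlSqlLexer/PlSqlParser and the
    # entry rule sql_script() are assumptions based on the usual grammars-v4
    # PL/SQL setup and may differ in a given build:
    #
    #   from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
    #   from PlSqlLexer import PlSqlLexer
    #   from PlSqlParser import PlSqlParser
    #   from PlSqlParserListener import PlSqlParserListener
    #
    #   class SelectCollector(PlSqlParserListener):
    #       # Override only the callbacks of interest; all others remain pass.
    #       def enterSelect_statement(self, ctx:PlSqlParser.Select_statementContext):
    #           print("SELECT at line", ctx.start.line)
    #
    #   tree = PlSqlParser(CommonTokenStream(PlSqlLexer(InputStream(sql)))).sql_script()
    #   ParseTreeWalker.DEFAULT.walk(SelectCollector(), tree)
    # ----------------------------------------------------------------------------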
# Enter a parse tree produced by PlSqlParser#subquery_factoring_clause.
def enterSubquery_factoring_clause(self, ctx:PlSqlParser.Subquery_factoring_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#subquery_factoring_clause.
def exitSubquery_factoring_clause(self, ctx:PlSqlParser.Subquery_factoring_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#factoring_element.
def enterFactoring_element(self, ctx:PlSqlParser.Factoring_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#factoring_element.
def exitFactoring_element(self, ctx:PlSqlParser.Factoring_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#search_clause.
def enterSearch_clause(self, ctx:PlSqlParser.Search_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#search_clause.
def exitSearch_clause(self, ctx:PlSqlParser.Search_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#cycle_clause.
def enterCycle_clause(self, ctx:PlSqlParser.Cycle_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#cycle_clause.
def exitCycle_clause(self, ctx:PlSqlParser.Cycle_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#subquery.
def enterSubquery(self, ctx:PlSqlParser.SubqueryContext):
pass
# Exit a parse tree produced by PlSqlParser#subquery.
def exitSubquery(self, ctx:PlSqlParser.SubqueryContext):
pass
# Enter a parse tree produced by PlSqlParser#subquery_basic_elements.
def enterSubquery_basic_elements(self, ctx:PlSqlParser.Subquery_basic_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#subquery_basic_elements.
def exitSubquery_basic_elements(self, ctx:PlSqlParser.Subquery_basic_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#subquery_operation_part.
def enterSubquery_operation_part(self, ctx:PlSqlParser.Subquery_operation_partContext):
pass
# Exit a parse tree produced by PlSqlParser#subquery_operation_part.
def exitSubquery_operation_part(self, ctx:PlSqlParser.Subquery_operation_partContext):
pass
# Enter a parse tree produced by PlSqlParser#query_block.
def enterQuery_block(self, ctx:PlSqlParser.Query_blockContext):
pass
# Exit a parse tree produced by PlSqlParser#query_block.
def exitQuery_block(self, ctx:PlSqlParser.Query_blockContext):
pass
# Enter a parse tree produced by PlSqlParser#selected_element.
def enterSelected_element(self, ctx:PlSqlParser.Selected_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#selected_element.
def exitSelected_element(self, ctx:PlSqlParser.Selected_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#from_clause.
def enterFrom_clause(self, ctx:PlSqlParser.From_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#from_clause.
def exitFrom_clause(self, ctx:PlSqlParser.From_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#select_list_elements.
def enterSelect_list_elements(self, ctx:PlSqlParser.Select_list_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#select_list_elements.
def exitSelect_list_elements(self, ctx:PlSqlParser.Select_list_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#table_ref_list.
def enterTable_ref_list(self, ctx:PlSqlParser.Table_ref_listContext):
pass
# Exit a parse tree produced by PlSqlParser#table_ref_list.
def exitTable_ref_list(self, ctx:PlSqlParser.Table_ref_listContext):
pass
# Enter a parse tree produced by PlSqlParser#table_ref.
def enterTable_ref(self, ctx:PlSqlParser.Table_refContext):
pass
# Exit a parse tree produced by PlSqlParser#table_ref.
def exitTable_ref(self, ctx:PlSqlParser.Table_refContext):
pass
# Enter a parse tree produced by PlSqlParser#table_ref_aux.
def enterTable_ref_aux(self, ctx:PlSqlParser.Table_ref_auxContext):
pass
# Exit a parse tree produced by PlSqlParser#table_ref_aux.
def exitTable_ref_aux(self, ctx:PlSqlParser.Table_ref_auxContext):
pass
# Enter a parse tree produced by PlSqlParser#table_ref_aux_internal_one.
def enterTable_ref_aux_internal_one(self, ctx:PlSqlParser.Table_ref_aux_internal_oneContext):
pass
# Exit a parse tree produced by PlSqlParser#table_ref_aux_internal_one.
def exitTable_ref_aux_internal_one(self, ctx:PlSqlParser.Table_ref_aux_internal_oneContext):
pass
# Enter a parse tree produced by PlSqlParser#table_ref_aux_internal_two.
def enterTable_ref_aux_internal_two(self, ctx:PlSqlParser.Table_ref_aux_internal_twoContext):
pass
# Exit a parse tree produced by PlSqlParser#table_ref_aux_internal_two.
def exitTable_ref_aux_internal_two(self, ctx:PlSqlParser.Table_ref_aux_internal_twoContext):
pass
# Enter a parse tree produced by PlSqlParser#table_ref_aux_internal_three.
def enterTable_ref_aux_internal_three(self, ctx:PlSqlParser.Table_ref_aux_internal_threeContext):
pass
# Exit a parse tree produced by PlSqlParser#table_ref_aux_internal_three.
def exitTable_ref_aux_internal_three(self, ctx:PlSqlParser.Table_ref_aux_internal_threeContext):
pass
# Enter a parse tree produced by PlSqlParser#join_clause.
def enterJoin_clause(self, ctx:PlSqlParser.Join_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#join_clause.
def exitJoin_clause(self, ctx:PlSqlParser.Join_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#join_on_part.
def enterJoin_on_part(self, ctx:PlSqlParser.Join_on_partContext):
pass
# Exit a parse tree produced by PlSqlParser#join_on_part.
def exitJoin_on_part(self, ctx:PlSqlParser.Join_on_partContext):
pass
# Enter a parse tree produced by PlSqlParser#join_using_part.
def enterJoin_using_part(self, ctx:PlSqlParser.Join_using_partContext):
pass
# Exit a parse tree produced by PlSqlParser#join_using_part.
def exitJoin_using_part(self, ctx:PlSqlParser.Join_using_partContext):
pass
# Enter a parse tree produced by PlSqlParser#outer_join_type.
def enterOuter_join_type(self, ctx:PlSqlParser.Outer_join_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#outer_join_type.
def exitOuter_join_type(self, ctx:PlSqlParser.Outer_join_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#query_partition_clause.
def enterQuery_partition_clause(self, ctx:PlSqlParser.Query_partition_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#query_partition_clause.
def exitQuery_partition_clause(self, ctx:PlSqlParser.Query_partition_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#flashback_query_clause.
def enterFlashback_query_clause(self, ctx:PlSqlParser.Flashback_query_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#flashback_query_clause.
def exitFlashback_query_clause(self, ctx:PlSqlParser.Flashback_query_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#pivot_clause.
def enterPivot_clause(self, ctx:PlSqlParser.Pivot_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#pivot_clause.
def exitPivot_clause(self, ctx:PlSqlParser.Pivot_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#pivot_element.
def enterPivot_element(self, ctx:PlSqlParser.Pivot_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#pivot_element.
def exitPivot_element(self, ctx:PlSqlParser.Pivot_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#pivot_for_clause.
def enterPivot_for_clause(self, ctx:PlSqlParser.Pivot_for_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#pivot_for_clause.
def exitPivot_for_clause(self, ctx:PlSqlParser.Pivot_for_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#pivot_in_clause.
def enterPivot_in_clause(self, ctx:PlSqlParser.Pivot_in_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#pivot_in_clause.
def exitPivot_in_clause(self, ctx:PlSqlParser.Pivot_in_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#pivot_in_clause_element.
def enterPivot_in_clause_element(self, ctx:PlSqlParser.Pivot_in_clause_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#pivot_in_clause_element.
def exitPivot_in_clause_element(self, ctx:PlSqlParser.Pivot_in_clause_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#pivot_in_clause_elements.
def enterPivot_in_clause_elements(self, ctx:PlSqlParser.Pivot_in_clause_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#pivot_in_clause_elements.
def exitPivot_in_clause_elements(self, ctx:PlSqlParser.Pivot_in_clause_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#unpivot_clause.
def enterUnpivot_clause(self, ctx:PlSqlParser.Unpivot_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#unpivot_clause.
def exitUnpivot_clause(self, ctx:PlSqlParser.Unpivot_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#unpivot_in_clause.
def enterUnpivot_in_clause(self, ctx:PlSqlParser.Unpivot_in_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#unpivot_in_clause.
def exitUnpivot_in_clause(self, ctx:PlSqlParser.Unpivot_in_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#unpivot_in_elements.
def enterUnpivot_in_elements(self, ctx:PlSqlParser.Unpivot_in_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#unpivot_in_elements.
def exitUnpivot_in_elements(self, ctx:PlSqlParser.Unpivot_in_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#hierarchical_query_clause.
def enterHierarchical_query_clause(self, ctx:PlSqlParser.Hierarchical_query_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#hierarchical_query_clause.
def exitHierarchical_query_clause(self, ctx:PlSqlParser.Hierarchical_query_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#start_part.
def enterStart_part(self, ctx:PlSqlParser.Start_partContext):
pass
# Exit a parse tree produced by PlSqlParser#start_part.
def exitStart_part(self, ctx:PlSqlParser.Start_partContext):
pass
# Enter a parse tree produced by PlSqlParser#group_by_clause.
def enterGroup_by_clause(self, ctx:PlSqlParser.Group_by_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#group_by_clause.
def exitGroup_by_clause(self, ctx:PlSqlParser.Group_by_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#group_by_elements.
def enterGroup_by_elements(self, ctx:PlSqlParser.Group_by_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#group_by_elements.
def exitGroup_by_elements(self, ctx:PlSqlParser.Group_by_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#rollup_cube_clause.
def enterRollup_cube_clause(self, ctx:PlSqlParser.Rollup_cube_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#rollup_cube_clause.
def exitRollup_cube_clause(self, ctx:PlSqlParser.Rollup_cube_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#grouping_sets_clause.
def enterGrouping_sets_clause(self, ctx:PlSqlParser.Grouping_sets_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#grouping_sets_clause.
def exitGrouping_sets_clause(self, ctx:PlSqlParser.Grouping_sets_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#grouping_sets_elements.
def enterGrouping_sets_elements(self, ctx:PlSqlParser.Grouping_sets_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#grouping_sets_elements.
def exitGrouping_sets_elements(self, ctx:PlSqlParser.Grouping_sets_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#having_clause.
def enterHaving_clause(self, ctx:PlSqlParser.Having_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#having_clause.
def exitHaving_clause(self, ctx:PlSqlParser.Having_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#model_clause.
def enterModel_clause(self, ctx:PlSqlParser.Model_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#model_clause.
def exitModel_clause(self, ctx:PlSqlParser.Model_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#cell_reference_options.
def enterCell_reference_options(self, ctx:PlSqlParser.Cell_reference_optionsContext):
pass
# Exit a parse tree produced by PlSqlParser#cell_reference_options.
def exitCell_reference_options(self, ctx:PlSqlParser.Cell_reference_optionsContext):
pass
# Enter a parse tree produced by PlSqlParser#return_rows_clause.
def enterReturn_rows_clause(self, ctx:PlSqlParser.Return_rows_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#return_rows_clause.
def exitReturn_rows_clause(self, ctx:PlSqlParser.Return_rows_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#reference_model.
def enterReference_model(self, ctx:PlSqlParser.Reference_modelContext):
pass
# Exit a parse tree produced by PlSqlParser#reference_model.
def exitReference_model(self, ctx:PlSqlParser.Reference_modelContext):
pass
# Enter a parse tree produced by PlSqlParser#main_model.
def enterMain_model(self, ctx:PlSqlParser.Main_modelContext):
pass
# Exit a parse tree produced by PlSqlParser#main_model.
def exitMain_model(self, ctx:PlSqlParser.Main_modelContext):
pass
# Enter a parse tree produced by PlSqlParser#model_column_clauses.
def enterModel_column_clauses(self, ctx:PlSqlParser.Model_column_clausesContext):
pass
# Exit a parse tree produced by PlSqlParser#model_column_clauses.
def exitModel_column_clauses(self, ctx:PlSqlParser.Model_column_clausesContext):
pass
# Enter a parse tree produced by PlSqlParser#model_column_partition_part.
def enterModel_column_partition_part(self, ctx:PlSqlParser.Model_column_partition_partContext):
pass
# Exit a parse tree produced by PlSqlParser#model_column_partition_part.
def exitModel_column_partition_part(self, ctx:PlSqlParser.Model_column_partition_partContext):
pass
# Enter a parse tree produced by PlSqlParser#model_column_list.
def enterModel_column_list(self, ctx:PlSqlParser.Model_column_listContext):
pass
# Exit a parse tree produced by PlSqlParser#model_column_list.
def exitModel_column_list(self, ctx:PlSqlParser.Model_column_listContext):
pass
# Enter a parse tree produced by PlSqlParser#model_column.
def enterModel_column(self, ctx:PlSqlParser.Model_columnContext):
pass
# Exit a parse tree produced by PlSqlParser#model_column.
def exitModel_column(self, ctx:PlSqlParser.Model_columnContext):
pass
# Enter a parse tree produced by PlSqlParser#model_rules_clause.
def enterModel_rules_clause(self, ctx:PlSqlParser.Model_rules_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#model_rules_clause.
def exitModel_rules_clause(self, ctx:PlSqlParser.Model_rules_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#model_rules_part.
def enterModel_rules_part(self, ctx:PlSqlParser.Model_rules_partContext):
pass
# Exit a parse tree produced by PlSqlParser#model_rules_part.
def exitModel_rules_part(self, ctx:PlSqlParser.Model_rules_partContext):
pass
# Enter a parse tree produced by PlSqlParser#model_rules_element.
def enterModel_rules_element(self, ctx:PlSqlParser.Model_rules_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#model_rules_element.
def exitModel_rules_element(self, ctx:PlSqlParser.Model_rules_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#cell_assignment.
def enterCell_assignment(self, ctx:PlSqlParser.Cell_assignmentContext):
pass
# Exit a parse tree produced by PlSqlParser#cell_assignment.
def exitCell_assignment(self, ctx:PlSqlParser.Cell_assignmentContext):
pass
# Enter a parse tree produced by PlSqlParser#model_iterate_clause.
def enterModel_iterate_clause(self, ctx:PlSqlParser.Model_iterate_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#model_iterate_clause.
def exitModel_iterate_clause(self, ctx:PlSqlParser.Model_iterate_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#until_part.
def enterUntil_part(self, ctx:PlSqlParser.Until_partContext):
pass
# Exit a parse tree produced by PlSqlParser#until_part.
def exitUntil_part(self, ctx:PlSqlParser.Until_partContext):
pass
# Enter a parse tree produced by PlSqlParser#order_by_clause.
def enterOrder_by_clause(self, ctx:PlSqlParser.Order_by_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#order_by_clause.
def exitOrder_by_clause(self, ctx:PlSqlParser.Order_by_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#order_by_elements.
def enterOrder_by_elements(self, ctx:PlSqlParser.Order_by_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#order_by_elements.
def exitOrder_by_elements(self, ctx:PlSqlParser.Order_by_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#for_update_clause.
def enterFor_update_clause(self, ctx:PlSqlParser.For_update_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#for_update_clause.
def exitFor_update_clause(self, ctx:PlSqlParser.For_update_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#for_update_of_part.
def enterFor_update_of_part(self, ctx:PlSqlParser.For_update_of_partContext):
pass
# Exit a parse tree produced by PlSqlParser#for_update_of_part.
def exitFor_update_of_part(self, ctx:PlSqlParser.For_update_of_partContext):
pass
# Enter a parse tree produced by PlSqlParser#for_update_options.
def enterFor_update_options(self, ctx:PlSqlParser.For_update_optionsContext):
pass
# Exit a parse tree produced by PlSqlParser#for_update_options.
def exitFor_update_options(self, ctx:PlSqlParser.For_update_optionsContext):
pass
# Enter a parse tree produced by PlSqlParser#update_statement.
def enterUpdate_statement(self, ctx:PlSqlParser.Update_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#update_statement.
def exitUpdate_statement(self, ctx:PlSqlParser.Update_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#update_set_clause.
def enterUpdate_set_clause(self, ctx:PlSqlParser.Update_set_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#update_set_clause.
def exitUpdate_set_clause(self, ctx:PlSqlParser.Update_set_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#column_based_update_set_clause.
def enterColumn_based_update_set_clause(self, ctx:PlSqlParser.Column_based_update_set_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#column_based_update_set_clause.
def exitColumn_based_update_set_clause(self, ctx:PlSqlParser.Column_based_update_set_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#delete_statement.
def enterDelete_statement(self, ctx:PlSqlParser.Delete_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#delete_statement.
def exitDelete_statement(self, ctx:PlSqlParser.Delete_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#insert_statement.
def enterInsert_statement(self, ctx:PlSqlParser.Insert_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#insert_statement.
def exitInsert_statement(self, ctx:PlSqlParser.Insert_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#single_table_insert.
def enterSingle_table_insert(self, ctx:PlSqlParser.Single_table_insertContext):
pass
# Exit a parse tree produced by PlSqlParser#single_table_insert.
def exitSingle_table_insert(self, ctx:PlSqlParser.Single_table_insertContext):
pass
# Enter a parse tree produced by PlSqlParser#multi_table_insert.
def enterMulti_table_insert(self, ctx:PlSqlParser.Multi_table_insertContext):
pass
# Exit a parse tree produced by PlSqlParser#multi_table_insert.
def exitMulti_table_insert(self, ctx:PlSqlParser.Multi_table_insertContext):
pass
# Enter a parse tree produced by PlSqlParser#multi_table_element.
def enterMulti_table_element(self, ctx:PlSqlParser.Multi_table_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#multi_table_element.
def exitMulti_table_element(self, ctx:PlSqlParser.Multi_table_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#conditional_insert_clause.
def enterConditional_insert_clause(self, ctx:PlSqlParser.Conditional_insert_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#conditional_insert_clause.
def exitConditional_insert_clause(self, ctx:PlSqlParser.Conditional_insert_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#conditional_insert_when_part.
def enterConditional_insert_when_part(self, ctx:PlSqlParser.Conditional_insert_when_partContext):
pass
# Exit a parse tree produced by PlSqlParser#conditional_insert_when_part.
def exitConditional_insert_when_part(self, ctx:PlSqlParser.Conditional_insert_when_partContext):
pass
# Enter a parse tree produced by PlSqlParser#conditional_insert_else_part.
def enterConditional_insert_else_part(self, ctx:PlSqlParser.Conditional_insert_else_partContext):
pass
# Exit a parse tree produced by PlSqlParser#conditional_insert_else_part.
def exitConditional_insert_else_part(self, ctx:PlSqlParser.Conditional_insert_else_partContext):
pass
# Enter a parse tree produced by PlSqlParser#insert_into_clause.
def enterInsert_into_clause(self, ctx:PlSqlParser.Insert_into_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#insert_into_clause.
def exitInsert_into_clause(self, ctx:PlSqlParser.Insert_into_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#values_clause.
def enterValues_clause(self, ctx:PlSqlParser.Values_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#values_clause.
def exitValues_clause(self, ctx:PlSqlParser.Values_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#merge_statement.
def enterMerge_statement(self, ctx:PlSqlParser.Merge_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#merge_statement.
def exitMerge_statement(self, ctx:PlSqlParser.Merge_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#merge_update_clause.
def enterMerge_update_clause(self, ctx:PlSqlParser.Merge_update_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#merge_update_clause.
def exitMerge_update_clause(self, ctx:PlSqlParser.Merge_update_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#merge_element.
def enterMerge_element(self, ctx:PlSqlParser.Merge_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#merge_element.
def exitMerge_element(self, ctx:PlSqlParser.Merge_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#merge_update_delete_part.
def enterMerge_update_delete_part(self, ctx:PlSqlParser.Merge_update_delete_partContext):
pass
# Exit a parse tree produced by PlSqlParser#merge_update_delete_part.
def exitMerge_update_delete_part(self, ctx:PlSqlParser.Merge_update_delete_partContext):
pass
# Enter a parse tree produced by PlSqlParser#merge_insert_clause.
def enterMerge_insert_clause(self, ctx:PlSqlParser.Merge_insert_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#merge_insert_clause.
def exitMerge_insert_clause(self, ctx:PlSqlParser.Merge_insert_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#selected_tableview.
def enterSelected_tableview(self, ctx:PlSqlParser.Selected_tableviewContext):
pass
# Exit a parse tree produced by PlSqlParser#selected_tableview.
def exitSelected_tableview(self, ctx:PlSqlParser.Selected_tableviewContext):
pass
# Enter a parse tree produced by PlSqlParser#lock_table_statement.
def enterLock_table_statement(self, ctx:PlSqlParser.Lock_table_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#lock_table_statement.
def exitLock_table_statement(self, ctx:PlSqlParser.Lock_table_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#wait_nowait_part.
def enterWait_nowait_part(self, ctx:PlSqlParser.Wait_nowait_partContext):
pass
# Exit a parse tree produced by PlSqlParser#wait_nowait_part.
def exitWait_nowait_part(self, ctx:PlSqlParser.Wait_nowait_partContext):
pass
# Enter a parse tree produced by PlSqlParser#lock_table_element.
def enterLock_table_element(self, ctx:PlSqlParser.Lock_table_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#lock_table_element.
def exitLock_table_element(self, ctx:PlSqlParser.Lock_table_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#lock_mode.
def enterLock_mode(self, ctx:PlSqlParser.Lock_modeContext):
pass
# Exit a parse tree produced by PlSqlParser#lock_mode.
def exitLock_mode(self, ctx:PlSqlParser.Lock_modeContext):
pass
# Enter a parse tree produced by PlSqlParser#general_table_ref.
def enterGeneral_table_ref(self, ctx:PlSqlParser.General_table_refContext):
pass
# Exit a parse tree produced by PlSqlParser#general_table_ref.
def exitGeneral_table_ref(self, ctx:PlSqlParser.General_table_refContext):
pass
# Enter a parse tree produced by PlSqlParser#static_returning_clause.
def enterStatic_returning_clause(self, ctx:PlSqlParser.Static_returning_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#static_returning_clause.
def exitStatic_returning_clause(self, ctx:PlSqlParser.Static_returning_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#error_logging_clause.
def enterError_logging_clause(self, ctx:PlSqlParser.Error_logging_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#error_logging_clause.
def exitError_logging_clause(self, ctx:PlSqlParser.Error_logging_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#error_logging_into_part.
def enterError_logging_into_part(self, ctx:PlSqlParser.Error_logging_into_partContext):
pass
# Exit a parse tree produced by PlSqlParser#error_logging_into_part.
def exitError_logging_into_part(self, ctx:PlSqlParser.Error_logging_into_partContext):
pass
# Enter a parse tree produced by PlSqlParser#error_logging_reject_part.
def enterError_logging_reject_part(self, ctx:PlSqlParser.Error_logging_reject_partContext):
pass
# Exit a parse tree produced by PlSqlParser#error_logging_reject_part.
def exitError_logging_reject_part(self, ctx:PlSqlParser.Error_logging_reject_partContext):
pass
# Enter a parse tree produced by PlSqlParser#dml_table_expression_clause.
def enterDml_table_expression_clause(self, ctx:PlSqlParser.Dml_table_expression_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#dml_table_expression_clause.
def exitDml_table_expression_clause(self, ctx:PlSqlParser.Dml_table_expression_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#table_collection_expression.
def enterTable_collection_expression(self, ctx:PlSqlParser.Table_collection_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#table_collection_expression.
def exitTable_collection_expression(self, ctx:PlSqlParser.Table_collection_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#subquery_restriction_clause.
def enterSubquery_restriction_clause(self, ctx:PlSqlParser.Subquery_restriction_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#subquery_restriction_clause.
def exitSubquery_restriction_clause(self, ctx:PlSqlParser.Subquery_restriction_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#sample_clause.
def enterSample_clause(self, ctx:PlSqlParser.Sample_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#sample_clause.
def exitSample_clause(self, ctx:PlSqlParser.Sample_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#seed_part.
def enterSeed_part(self, ctx:PlSqlParser.Seed_partContext):
pass
# Exit a parse tree produced by PlSqlParser#seed_part.
def exitSeed_part(self, ctx:PlSqlParser.Seed_partContext):
pass
# Enter a parse tree produced by PlSqlParser#condition.
def enterCondition(self, ctx:PlSqlParser.ConditionContext):
pass
# Exit a parse tree produced by PlSqlParser#condition.
def exitCondition(self, ctx:PlSqlParser.ConditionContext):
pass
# Enter a parse tree produced by PlSqlParser#expressions.
def enterExpressions(self, ctx:PlSqlParser.ExpressionsContext):
pass
# Exit a parse tree produced by PlSqlParser#expressions.
def exitExpressions(self, ctx:PlSqlParser.ExpressionsContext):
pass
# Enter a parse tree produced by PlSqlParser#expression.
def enterExpression(self, ctx:PlSqlParser.ExpressionContext):
pass
# Exit a parse tree produced by PlSqlParser#expression.
def exitExpression(self, ctx:PlSqlParser.ExpressionContext):
pass
# Enter a parse tree produced by PlSqlParser#cursor_expression.
def enterCursor_expression(self, ctx:PlSqlParser.Cursor_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#cursor_expression.
def exitCursor_expression(self, ctx:PlSqlParser.Cursor_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#logical_expression.
def enterLogical_expression(self, ctx:PlSqlParser.Logical_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#logical_expression.
def exitLogical_expression(self, ctx:PlSqlParser.Logical_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#multiset_expression.
def enterMultiset_expression(self, ctx:PlSqlParser.Multiset_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#multiset_expression.
def exitMultiset_expression(self, ctx:PlSqlParser.Multiset_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#relational_expression.
def enterRelational_expression(self, ctx:PlSqlParser.Relational_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#relational_expression.
def exitRelational_expression(self, ctx:PlSqlParser.Relational_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#compound_expression.
def enterCompound_expression(self, ctx:PlSqlParser.Compound_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#compound_expression.
def exitCompound_expression(self, ctx:PlSqlParser.Compound_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#relational_operator.
def enterRelational_operator(self, ctx:PlSqlParser.Relational_operatorContext):
pass
# Exit a parse tree produced by PlSqlParser#relational_operator.
def exitRelational_operator(self, ctx:PlSqlParser.Relational_operatorContext):
pass
# Enter a parse tree produced by PlSqlParser#in_elements.
def enterIn_elements(self, ctx:PlSqlParser.In_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#in_elements.
def exitIn_elements(self, ctx:PlSqlParser.In_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#between_elements.
def enterBetween_elements(self, ctx:PlSqlParser.Between_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#between_elements.
def exitBetween_elements(self, ctx:PlSqlParser.Between_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#concatenation.
def enterConcatenation(self, ctx:PlSqlParser.ConcatenationContext):
pass
# Exit a parse tree produced by PlSqlParser#concatenation.
def exitConcatenation(self, ctx:PlSqlParser.ConcatenationContext):
pass
# Enter a parse tree produced by PlSqlParser#interval_expression.
def enterInterval_expression(self, ctx:PlSqlParser.Interval_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#interval_expression.
def exitInterval_expression(self, ctx:PlSqlParser.Interval_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#model_expression.
def enterModel_expression(self, ctx:PlSqlParser.Model_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#model_expression.
def exitModel_expression(self, ctx:PlSqlParser.Model_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#model_expression_element.
def enterModel_expression_element(self, ctx:PlSqlParser.Model_expression_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#model_expression_element.
def exitModel_expression_element(self, ctx:PlSqlParser.Model_expression_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#single_column_for_loop.
def enterSingle_column_for_loop(self, ctx:PlSqlParser.Single_column_for_loopContext):
pass
# Exit a parse tree produced by PlSqlParser#single_column_for_loop.
def exitSingle_column_for_loop(self, ctx:PlSqlParser.Single_column_for_loopContext):
pass
# Enter a parse tree produced by PlSqlParser#multi_column_for_loop.
def enterMulti_column_for_loop(self, ctx:PlSqlParser.Multi_column_for_loopContext):
pass
# Exit a parse tree produced by PlSqlParser#multi_column_for_loop.
def exitMulti_column_for_loop(self, ctx:PlSqlParser.Multi_column_for_loopContext):
pass
# Enter a parse tree produced by PlSqlParser#unary_expression.
def enterUnary_expression(self, ctx:PlSqlParser.Unary_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#unary_expression.
def exitUnary_expression(self, ctx:PlSqlParser.Unary_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#case_statement.
def enterCase_statement(self, ctx:PlSqlParser.Case_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#case_statement.
def exitCase_statement(self, ctx:PlSqlParser.Case_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#simple_case_statement.
def enterSimple_case_statement(self, ctx:PlSqlParser.Simple_case_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#simple_case_statement.
def exitSimple_case_statement(self, ctx:PlSqlParser.Simple_case_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#simple_case_when_part.
def enterSimple_case_when_part(self, ctx:PlSqlParser.Simple_case_when_partContext):
pass
# Exit a parse tree produced by PlSqlParser#simple_case_when_part.
def exitSimple_case_when_part(self, ctx:PlSqlParser.Simple_case_when_partContext):
pass
# Enter a parse tree produced by PlSqlParser#searched_case_statement.
def enterSearched_case_statement(self, ctx:PlSqlParser.Searched_case_statementContext):
pass
# Exit a parse tree produced by PlSqlParser#searched_case_statement.
def exitSearched_case_statement(self, ctx:PlSqlParser.Searched_case_statementContext):
pass
# Enter a parse tree produced by PlSqlParser#searched_case_when_part.
def enterSearched_case_when_part(self, ctx:PlSqlParser.Searched_case_when_partContext):
pass
# Exit a parse tree produced by PlSqlParser#searched_case_when_part.
def exitSearched_case_when_part(self, ctx:PlSqlParser.Searched_case_when_partContext):
pass
# Enter a parse tree produced by PlSqlParser#case_else_part.
def enterCase_else_part(self, ctx:PlSqlParser.Case_else_partContext):
pass
# Exit a parse tree produced by PlSqlParser#case_else_part.
def exitCase_else_part(self, ctx:PlSqlParser.Case_else_partContext):
pass
# Enter a parse tree produced by PlSqlParser#atom.
def enterAtom(self, ctx:PlSqlParser.AtomContext):
pass
# Exit a parse tree produced by PlSqlParser#atom.
def exitAtom(self, ctx:PlSqlParser.AtomContext):
pass
# Enter a parse tree produced by PlSqlParser#quantified_expression.
def enterQuantified_expression(self, ctx:PlSqlParser.Quantified_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#quantified_expression.
def exitQuantified_expression(self, ctx:PlSqlParser.Quantified_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#string_function.
def enterString_function(self, ctx:PlSqlParser.String_functionContext):
pass
# Exit a parse tree produced by PlSqlParser#string_function.
def exitString_function(self, ctx:PlSqlParser.String_functionContext):
pass
# Enter a parse tree produced by PlSqlParser#standard_function.
def enterStandard_function(self, ctx:PlSqlParser.Standard_functionContext):
pass
# Exit a parse tree produced by PlSqlParser#standard_function.
def exitStandard_function(self, ctx:PlSqlParser.Standard_functionContext):
pass
# Enter a parse tree produced by PlSqlParser#numeric_function_wrapper.
def enterNumeric_function_wrapper(self, ctx:PlSqlParser.Numeric_function_wrapperContext):
pass
# Exit a parse tree produced by PlSqlParser#numeric_function_wrapper.
def exitNumeric_function_wrapper(self, ctx:PlSqlParser.Numeric_function_wrapperContext):
pass
# Enter a parse tree produced by PlSqlParser#numeric_function.
def enterNumeric_function(self, ctx:PlSqlParser.Numeric_functionContext):
pass
# Exit a parse tree produced by PlSqlParser#numeric_function.
def exitNumeric_function(self, ctx:PlSqlParser.Numeric_functionContext):
pass
# Enter a parse tree produced by PlSqlParser#other_function.
def enterOther_function(self, ctx:PlSqlParser.Other_functionContext):
pass
# Exit a parse tree produced by PlSqlParser#other_function.
def exitOther_function(self, ctx:PlSqlParser.Other_functionContext):
pass
# Enter a parse tree produced by PlSqlParser#over_clause_keyword.
def enterOver_clause_keyword(self, ctx:PlSqlParser.Over_clause_keywordContext):
pass
# Exit a parse tree produced by PlSqlParser#over_clause_keyword.
def exitOver_clause_keyword(self, ctx:PlSqlParser.Over_clause_keywordContext):
pass
# Enter a parse tree produced by PlSqlParser#within_or_over_clause_keyword.
def enterWithin_or_over_clause_keyword(self, ctx:PlSqlParser.Within_or_over_clause_keywordContext):
pass
# Exit a parse tree produced by PlSqlParser#within_or_over_clause_keyword.
def exitWithin_or_over_clause_keyword(self, ctx:PlSqlParser.Within_or_over_clause_keywordContext):
pass
# Enter a parse tree produced by PlSqlParser#standard_prediction_function_keyword.
def enterStandard_prediction_function_keyword(self, ctx:PlSqlParser.Standard_prediction_function_keywordContext):
pass
# Exit a parse tree produced by PlSqlParser#standard_prediction_function_keyword.
def exitStandard_prediction_function_keyword(self, ctx:PlSqlParser.Standard_prediction_function_keywordContext):
pass
# Enter a parse tree produced by PlSqlParser#over_clause.
def enterOver_clause(self, ctx:PlSqlParser.Over_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#over_clause.
def exitOver_clause(self, ctx:PlSqlParser.Over_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#windowing_clause.
def enterWindowing_clause(self, ctx:PlSqlParser.Windowing_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#windowing_clause.
def exitWindowing_clause(self, ctx:PlSqlParser.Windowing_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#windowing_type.
def enterWindowing_type(self, ctx:PlSqlParser.Windowing_typeContext):
pass
# Exit a parse tree produced by PlSqlParser#windowing_type.
def exitWindowing_type(self, ctx:PlSqlParser.Windowing_typeContext):
pass
# Enter a parse tree produced by PlSqlParser#windowing_elements.
def enterWindowing_elements(self, ctx:PlSqlParser.Windowing_elementsContext):
pass
# Exit a parse tree produced by PlSqlParser#windowing_elements.
def exitWindowing_elements(self, ctx:PlSqlParser.Windowing_elementsContext):
pass
# Enter a parse tree produced by PlSqlParser#using_clause.
def enterUsing_clause(self, ctx:PlSqlParser.Using_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#using_clause.
def exitUsing_clause(self, ctx:PlSqlParser.Using_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#using_element.
def enterUsing_element(self, ctx:PlSqlParser.Using_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#using_element.
def exitUsing_element(self, ctx:PlSqlParser.Using_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#collect_order_by_part.
def enterCollect_order_by_part(self, ctx:PlSqlParser.Collect_order_by_partContext):
pass
# Exit a parse tree produced by PlSqlParser#collect_order_by_part.
def exitCollect_order_by_part(self, ctx:PlSqlParser.Collect_order_by_partContext):
pass
# Enter a parse tree produced by PlSqlParser#within_or_over_part.
def enterWithin_or_over_part(self, ctx:PlSqlParser.Within_or_over_partContext):
pass
# Exit a parse tree produced by PlSqlParser#within_or_over_part.
def exitWithin_or_over_part(self, ctx:PlSqlParser.Within_or_over_partContext):
pass
# Enter a parse tree produced by PlSqlParser#cost_matrix_clause.
def enterCost_matrix_clause(self, ctx:PlSqlParser.Cost_matrix_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#cost_matrix_clause.
def exitCost_matrix_clause(self, ctx:PlSqlParser.Cost_matrix_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_passing_clause.
def enterXml_passing_clause(self, ctx:PlSqlParser.Xml_passing_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_passing_clause.
def exitXml_passing_clause(self, ctx:PlSqlParser.Xml_passing_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_attributes_clause.
def enterXml_attributes_clause(self, ctx:PlSqlParser.Xml_attributes_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_attributes_clause.
def exitXml_attributes_clause(self, ctx:PlSqlParser.Xml_attributes_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_namespaces_clause.
def enterXml_namespaces_clause(self, ctx:PlSqlParser.Xml_namespaces_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_namespaces_clause.
def exitXml_namespaces_clause(self, ctx:PlSqlParser.Xml_namespaces_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_table_column.
def enterXml_table_column(self, ctx:PlSqlParser.Xml_table_columnContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_table_column.
def exitXml_table_column(self, ctx:PlSqlParser.Xml_table_columnContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_general_default_part.
def enterXml_general_default_part(self, ctx:PlSqlParser.Xml_general_default_partContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_general_default_part.
def exitXml_general_default_part(self, ctx:PlSqlParser.Xml_general_default_partContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_multiuse_expression_element.
def enterXml_multiuse_expression_element(self, ctx:PlSqlParser.Xml_multiuse_expression_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_multiuse_expression_element.
def exitXml_multiuse_expression_element(self, ctx:PlSqlParser.Xml_multiuse_expression_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#xmlroot_param_version_part.
def enterXmlroot_param_version_part(self, ctx:PlSqlParser.Xmlroot_param_version_partContext):
pass
# Exit a parse tree produced by PlSqlParser#xmlroot_param_version_part.
def exitXmlroot_param_version_part(self, ctx:PlSqlParser.Xmlroot_param_version_partContext):
pass
# Enter a parse tree produced by PlSqlParser#xmlroot_param_standalone_part.
def enterXmlroot_param_standalone_part(self, ctx:PlSqlParser.Xmlroot_param_standalone_partContext):
pass
# Exit a parse tree produced by PlSqlParser#xmlroot_param_standalone_part.
def exitXmlroot_param_standalone_part(self, ctx:PlSqlParser.Xmlroot_param_standalone_partContext):
pass
# Enter a parse tree produced by PlSqlParser#xmlserialize_param_enconding_part.
def enterXmlserialize_param_enconding_part(self, ctx:PlSqlParser.Xmlserialize_param_enconding_partContext):
pass
# Exit a parse tree produced by PlSqlParser#xmlserialize_param_enconding_part.
def exitXmlserialize_param_enconding_part(self, ctx:PlSqlParser.Xmlserialize_param_enconding_partContext):
pass
# Enter a parse tree produced by PlSqlParser#xmlserialize_param_version_part.
def enterXmlserialize_param_version_part(self, ctx:PlSqlParser.Xmlserialize_param_version_partContext):
pass
# Exit a parse tree produced by PlSqlParser#xmlserialize_param_version_part.
def exitXmlserialize_param_version_part(self, ctx:PlSqlParser.Xmlserialize_param_version_partContext):
pass
# Enter a parse tree produced by PlSqlParser#xmlserialize_param_ident_part.
def enterXmlserialize_param_ident_part(self, ctx:PlSqlParser.Xmlserialize_param_ident_partContext):
pass
# Exit a parse tree produced by PlSqlParser#xmlserialize_param_ident_part.
def exitXmlserialize_param_ident_part(self, ctx:PlSqlParser.Xmlserialize_param_ident_partContext):
pass
# Enter a parse tree produced by PlSqlParser#sql_plus_command.
def enterSql_plus_command(self, ctx:PlSqlParser.Sql_plus_commandContext):
pass
# Exit a parse tree produced by PlSqlParser#sql_plus_command.
def exitSql_plus_command(self, ctx:PlSqlParser.Sql_plus_commandContext):
pass
# Enter a parse tree produced by PlSqlParser#whenever_command.
def enterWhenever_command(self, ctx:PlSqlParser.Whenever_commandContext):
pass
# Exit a parse tree produced by PlSqlParser#whenever_command.
def exitWhenever_command(self, ctx:PlSqlParser.Whenever_commandContext):
pass
# Enter a parse tree produced by PlSqlParser#set_command.
def enterSet_command(self, ctx:PlSqlParser.Set_commandContext):
pass
# Exit a parse tree produced by PlSqlParser#set_command.
def exitSet_command(self, ctx:PlSqlParser.Set_commandContext):
pass
# Enter a parse tree produced by PlSqlParser#partition_extension_clause.
def enterPartition_extension_clause(self, ctx:PlSqlParser.Partition_extension_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#partition_extension_clause.
def exitPartition_extension_clause(self, ctx:PlSqlParser.Partition_extension_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#column_alias.
def enterColumn_alias(self, ctx:PlSqlParser.Column_aliasContext):
pass
# Exit a parse tree produced by PlSqlParser#column_alias.
def exitColumn_alias(self, ctx:PlSqlParser.Column_aliasContext):
pass
# Enter a parse tree produced by PlSqlParser#table_alias.
def enterTable_alias(self, ctx:PlSqlParser.Table_aliasContext):
pass
# Exit a parse tree produced by PlSqlParser#table_alias.
def exitTable_alias(self, ctx:PlSqlParser.Table_aliasContext):
pass
# Enter a parse tree produced by PlSqlParser#where_clause.
def enterWhere_clause(self, ctx:PlSqlParser.Where_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#where_clause.
def exitWhere_clause(self, ctx:PlSqlParser.Where_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#into_clause.
def enterInto_clause(self, ctx:PlSqlParser.Into_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#into_clause.
def exitInto_clause(self, ctx:PlSqlParser.Into_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#xml_column_name.
def enterXml_column_name(self, ctx:PlSqlParser.Xml_column_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#xml_column_name.
def exitXml_column_name(self, ctx:PlSqlParser.Xml_column_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#cost_class_name.
def enterCost_class_name(self, ctx:PlSqlParser.Cost_class_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#cost_class_name.
def exitCost_class_name(self, ctx:PlSqlParser.Cost_class_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#attribute_name.
def enterAttribute_name(self, ctx:PlSqlParser.Attribute_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#attribute_name.
def exitAttribute_name(self, ctx:PlSqlParser.Attribute_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#savepoint_name.
def enterSavepoint_name(self, ctx:PlSqlParser.Savepoint_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#savepoint_name.
def exitSavepoint_name(self, ctx:PlSqlParser.Savepoint_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#rollback_segment_name.
def enterRollback_segment_name(self, ctx:PlSqlParser.Rollback_segment_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#rollback_segment_name.
def exitRollback_segment_name(self, ctx:PlSqlParser.Rollback_segment_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#table_var_name.
def enterTable_var_name(self, ctx:PlSqlParser.Table_var_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#table_var_name.
def exitTable_var_name(self, ctx:PlSqlParser.Table_var_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#schema_name.
def enterSchema_name(self, ctx:PlSqlParser.Schema_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#schema_name.
def exitSchema_name(self, ctx:PlSqlParser.Schema_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#routine_name.
def enterRoutine_name(self, ctx:PlSqlParser.Routine_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#routine_name.
def exitRoutine_name(self, ctx:PlSqlParser.Routine_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#package_name.
def enterPackage_name(self, ctx:PlSqlParser.Package_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#package_name.
def exitPackage_name(self, ctx:PlSqlParser.Package_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#implementation_type_name.
def enterImplementation_type_name(self, ctx:PlSqlParser.Implementation_type_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#implementation_type_name.
def exitImplementation_type_name(self, ctx:PlSqlParser.Implementation_type_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#parameter_name.
def enterParameter_name(self, ctx:PlSqlParser.Parameter_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#parameter_name.
def exitParameter_name(self, ctx:PlSqlParser.Parameter_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#reference_model_name.
def enterReference_model_name(self, ctx:PlSqlParser.Reference_model_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#reference_model_name.
def exitReference_model_name(self, ctx:PlSqlParser.Reference_model_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#main_model_name.
def enterMain_model_name(self, ctx:PlSqlParser.Main_model_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#main_model_name.
def exitMain_model_name(self, ctx:PlSqlParser.Main_model_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#container_tableview_name.
def enterContainer_tableview_name(self, ctx:PlSqlParser.Container_tableview_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#container_tableview_name.
def exitContainer_tableview_name(self, ctx:PlSqlParser.Container_tableview_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#aggregate_function_name.
def enterAggregate_function_name(self, ctx:PlSqlParser.Aggregate_function_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#aggregate_function_name.
def exitAggregate_function_name(self, ctx:PlSqlParser.Aggregate_function_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#query_name.
def enterQuery_name(self, ctx:PlSqlParser.Query_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#query_name.
def exitQuery_name(self, ctx:PlSqlParser.Query_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#grantee_name.
def enterGrantee_name(self, ctx:PlSqlParser.Grantee_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#grantee_name.
def exitGrantee_name(self, ctx:PlSqlParser.Grantee_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#role_name.
def enterRole_name(self, ctx:PlSqlParser.Role_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#role_name.
def exitRole_name(self, ctx:PlSqlParser.Role_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#constraint_name.
def enterConstraint_name(self, ctx:PlSqlParser.Constraint_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#constraint_name.
def exitConstraint_name(self, ctx:PlSqlParser.Constraint_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#label_name.
def enterLabel_name(self, ctx:PlSqlParser.Label_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#label_name.
def exitLabel_name(self, ctx:PlSqlParser.Label_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#type_name.
def enterType_name(self, ctx:PlSqlParser.Type_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#type_name.
def exitType_name(self, ctx:PlSqlParser.Type_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#sequence_name.
def enterSequence_name(self, ctx:PlSqlParser.Sequence_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#sequence_name.
def exitSequence_name(self, ctx:PlSqlParser.Sequence_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#exception_name.
def enterException_name(self, ctx:PlSqlParser.Exception_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#exception_name.
def exitException_name(self, ctx:PlSqlParser.Exception_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#function_name.
def enterFunction_name(self, ctx:PlSqlParser.Function_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#function_name.
def exitFunction_name(self, ctx:PlSqlParser.Function_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#procedure_name.
def enterProcedure_name(self, ctx:PlSqlParser.Procedure_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#procedure_name.
def exitProcedure_name(self, ctx:PlSqlParser.Procedure_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#trigger_name.
def enterTrigger_name(self, ctx:PlSqlParser.Trigger_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#trigger_name.
def exitTrigger_name(self, ctx:PlSqlParser.Trigger_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#variable_name.
def enterVariable_name(self, ctx:PlSqlParser.Variable_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#variable_name.
def exitVariable_name(self, ctx:PlSqlParser.Variable_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#index_name.
def enterIndex_name(self, ctx:PlSqlParser.Index_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#index_name.
def exitIndex_name(self, ctx:PlSqlParser.Index_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#cursor_name.
def enterCursor_name(self, ctx:PlSqlParser.Cursor_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#cursor_name.
def exitCursor_name(self, ctx:PlSqlParser.Cursor_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#record_name.
def enterRecord_name(self, ctx:PlSqlParser.Record_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#record_name.
def exitRecord_name(self, ctx:PlSqlParser.Record_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#collection_name.
def enterCollection_name(self, ctx:PlSqlParser.Collection_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#collection_name.
def exitCollection_name(self, ctx:PlSqlParser.Collection_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#link_name.
def enterLink_name(self, ctx:PlSqlParser.Link_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#link_name.
def exitLink_name(self, ctx:PlSqlParser.Link_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#column_name.
def enterColumn_name(self, ctx:PlSqlParser.Column_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#column_name.
def exitColumn_name(self, ctx:PlSqlParser.Column_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#tableview_name.
def enterTableview_name(self, ctx:PlSqlParser.Tableview_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#tableview_name.
def exitTableview_name(self, ctx:PlSqlParser.Tableview_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#char_set_name.
def enterChar_set_name(self, ctx:PlSqlParser.Char_set_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#char_set_name.
def exitChar_set_name(self, ctx:PlSqlParser.Char_set_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#synonym_name.
def enterSynonym_name(self, ctx:PlSqlParser.Synonym_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#synonym_name.
def exitSynonym_name(self, ctx:PlSqlParser.Synonym_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#schema_object_name.
def enterSchema_object_name(self, ctx:PlSqlParser.Schema_object_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#schema_object_name.
def exitSchema_object_name(self, ctx:PlSqlParser.Schema_object_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#dir_object_name.
def enterDir_object_name(self, ctx:PlSqlParser.Dir_object_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#dir_object_name.
def exitDir_object_name(self, ctx:PlSqlParser.Dir_object_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#user_object_name.
def enterUser_object_name(self, ctx:PlSqlParser.User_object_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#user_object_name.
def exitUser_object_name(self, ctx:PlSqlParser.User_object_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#grant_object_name.
def enterGrant_object_name(self, ctx:PlSqlParser.Grant_object_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#grant_object_name.
def exitGrant_object_name(self, ctx:PlSqlParser.Grant_object_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#column_list.
def enterColumn_list(self, ctx:PlSqlParser.Column_listContext):
pass
# Exit a parse tree produced by PlSqlParser#column_list.
def exitColumn_list(self, ctx:PlSqlParser.Column_listContext):
pass
# Enter a parse tree produced by PlSqlParser#paren_column_list.
def enterParen_column_list(self, ctx:PlSqlParser.Paren_column_listContext):
pass
# Exit a parse tree produced by PlSqlParser#paren_column_list.
def exitParen_column_list(self, ctx:PlSqlParser.Paren_column_listContext):
pass
# Enter a parse tree produced by PlSqlParser#keep_clause.
def enterKeep_clause(self, ctx:PlSqlParser.Keep_clauseContext):
pass
# Exit a parse tree produced by PlSqlParser#keep_clause.
def exitKeep_clause(self, ctx:PlSqlParser.Keep_clauseContext):
pass
# Enter a parse tree produced by PlSqlParser#function_argument.
def enterFunction_argument(self, ctx:PlSqlParser.Function_argumentContext):
pass
# Exit a parse tree produced by PlSqlParser#function_argument.
def exitFunction_argument(self, ctx:PlSqlParser.Function_argumentContext):
pass
# Enter a parse tree produced by PlSqlParser#function_argument_analytic.
def enterFunction_argument_analytic(self, ctx:PlSqlParser.Function_argument_analyticContext):
pass
# Exit a parse tree produced by PlSqlParser#function_argument_analytic.
def exitFunction_argument_analytic(self, ctx:PlSqlParser.Function_argument_analyticContext):
pass
# Enter a parse tree produced by PlSqlParser#function_argument_modeling.
def enterFunction_argument_modeling(self, ctx:PlSqlParser.Function_argument_modelingContext):
pass
# Exit a parse tree produced by PlSqlParser#function_argument_modeling.
def exitFunction_argument_modeling(self, ctx:PlSqlParser.Function_argument_modelingContext):
pass
# Enter a parse tree produced by PlSqlParser#respect_or_ignore_nulls.
def enterRespect_or_ignore_nulls(self, ctx:PlSqlParser.Respect_or_ignore_nullsContext):
pass
# Exit a parse tree produced by PlSqlParser#respect_or_ignore_nulls.
def exitRespect_or_ignore_nulls(self, ctx:PlSqlParser.Respect_or_ignore_nullsContext):
pass
# Enter a parse tree produced by PlSqlParser#argument.
def enterArgument(self, ctx:PlSqlParser.ArgumentContext):
pass
# Exit a parse tree produced by PlSqlParser#argument.
def exitArgument(self, ctx:PlSqlParser.ArgumentContext):
pass
# Enter a parse tree produced by PlSqlParser#type_spec.
def enterType_spec(self, ctx:PlSqlParser.Type_specContext):
pass
# Exit a parse tree produced by PlSqlParser#type_spec.
def exitType_spec(self, ctx:PlSqlParser.Type_specContext):
pass
# Enter a parse tree produced by PlSqlParser#datatype.
def enterDatatype(self, ctx:PlSqlParser.DatatypeContext):
pass
# Exit a parse tree produced by PlSqlParser#datatype.
def exitDatatype(self, ctx:PlSqlParser.DatatypeContext):
pass
# Enter a parse tree produced by PlSqlParser#precision_part.
def enterPrecision_part(self, ctx:PlSqlParser.Precision_partContext):
pass
# Exit a parse tree produced by PlSqlParser#precision_part.
def exitPrecision_part(self, ctx:PlSqlParser.Precision_partContext):
pass
# Enter a parse tree produced by PlSqlParser#native_datatype_element.
def enterNative_datatype_element(self, ctx:PlSqlParser.Native_datatype_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#native_datatype_element.
def exitNative_datatype_element(self, ctx:PlSqlParser.Native_datatype_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#bind_variable.
def enterBind_variable(self, ctx:PlSqlParser.Bind_variableContext):
pass
# Exit a parse tree produced by PlSqlParser#bind_variable.
def exitBind_variable(self, ctx:PlSqlParser.Bind_variableContext):
pass
# Enter a parse tree produced by PlSqlParser#general_element.
def enterGeneral_element(self, ctx:PlSqlParser.General_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#general_element.
def exitGeneral_element(self, ctx:PlSqlParser.General_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#general_element_part.
def enterGeneral_element_part(self, ctx:PlSqlParser.General_element_partContext):
pass
# Exit a parse tree produced by PlSqlParser#general_element_part.
def exitGeneral_element_part(self, ctx:PlSqlParser.General_element_partContext):
pass
# Enter a parse tree produced by PlSqlParser#table_element.
def enterTable_element(self, ctx:PlSqlParser.Table_elementContext):
pass
# Exit a parse tree produced by PlSqlParser#table_element.
def exitTable_element(self, ctx:PlSqlParser.Table_elementContext):
pass
# Enter a parse tree produced by PlSqlParser#object_privilege.
def enterObject_privilege(self, ctx:PlSqlParser.Object_privilegeContext):
pass
# Exit a parse tree produced by PlSqlParser#object_privilege.
def exitObject_privilege(self, ctx:PlSqlParser.Object_privilegeContext):
pass
# Enter a parse tree produced by PlSqlParser#system_privilege.
def enterSystem_privilege(self, ctx:PlSqlParser.System_privilegeContext):
pass
# Exit a parse tree produced by PlSqlParser#system_privilege.
def exitSystem_privilege(self, ctx:PlSqlParser.System_privilegeContext):
pass
# Enter a parse tree produced by PlSqlParser#constant.
def enterConstant(self, ctx:PlSqlParser.ConstantContext):
pass
# Exit a parse tree produced by PlSqlParser#constant.
def exitConstant(self, ctx:PlSqlParser.ConstantContext):
pass
# Enter a parse tree produced by PlSqlParser#numeric.
def enterNumeric(self, ctx:PlSqlParser.NumericContext):
pass
# Exit a parse tree produced by PlSqlParser#numeric.
def exitNumeric(self, ctx:PlSqlParser.NumericContext):
pass
# Enter a parse tree produced by PlSqlParser#numeric_negative.
def enterNumeric_negative(self, ctx:PlSqlParser.Numeric_negativeContext):
pass
# Exit a parse tree produced by PlSqlParser#numeric_negative.
def exitNumeric_negative(self, ctx:PlSqlParser.Numeric_negativeContext):
pass
# Enter a parse tree produced by PlSqlParser#quoted_string.
def enterQuoted_string(self, ctx:PlSqlParser.Quoted_stringContext):
pass
# Exit a parse tree produced by PlSqlParser#quoted_string.
def exitQuoted_string(self, ctx:PlSqlParser.Quoted_stringContext):
pass
# Enter a parse tree produced by PlSqlParser#identifier.
def enterIdentifier(self, ctx:PlSqlParser.IdentifierContext):
pass
# Exit a parse tree produced by PlSqlParser#identifier.
def exitIdentifier(self, ctx:PlSqlParser.IdentifierContext):
pass
# Enter a parse tree produced by PlSqlParser#id_expression.
def enterId_expression(self, ctx:PlSqlParser.Id_expressionContext):
pass
# Exit a parse tree produced by PlSqlParser#id_expression.
def exitId_expression(self, ctx:PlSqlParser.Id_expressionContext):
pass
# Enter a parse tree produced by PlSqlParser#outer_join_sign.
def enterOuter_join_sign(self, ctx:PlSqlParser.Outer_join_signContext):
pass
# Exit a parse tree produced by PlSqlParser#outer_join_sign.
def exitOuter_join_sign(self, ctx:PlSqlParser.Outer_join_signContext):
pass
# Enter a parse tree produced by PlSqlParser#regular_id.
def enterRegular_id(self, ctx:PlSqlParser.Regular_idContext):
pass
# Exit a parse tree produced by PlSqlParser#regular_id.
def exitRegular_id(self, ctx:PlSqlParser.Regular_idContext):
pass
# Enter a parse tree produced by PlSqlParser#string_function_name.
def enterString_function_name(self, ctx:PlSqlParser.String_function_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#string_function_name.
def exitString_function_name(self, ctx:PlSqlParser.String_function_nameContext):
pass
# Enter a parse tree produced by PlSqlParser#numeric_function_name.
def enterNumeric_function_name(self, ctx:PlSqlParser.Numeric_function_nameContext):
pass
# Exit a parse tree produced by PlSqlParser#numeric_function_name.
def exitNumeric_function_name(self, ctx:PlSqlParser.Numeric_function_nameContext):
pass
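# --- Illustrative sketch (not part of the generated listener above) ---
# The empty enter/exit hooks are meant to be overridden in a subclass and
# driven by ANTLR's tree walker. A minimal, hypothetical collector follows;
# it assumes a generated PlSqlLexer module sits alongside this listener and
# that sql_script is the grammar's entry rule (true for the grammars-v4
# PL/SQL grammar this file appears to be generated from).
from antlr4 import CommonTokenStream, InputStream, ParseTreeWalker
from PlSqlLexer import PlSqlLexer  # assumed sibling generated module

class TableNameCollector(PlSqlParserListener):
    def __init__(self):
        self.tables = []

    def enterTableview_name(self, ctx:PlSqlParser.Tableview_nameContext):
        # Record every table/view name the walker visits.
        self.tables.append(ctx.getText())

def collect_table_names(sql: str):
    # .upper() stands in for the grammar's case-insensitive input stream.
    lexer = PlSqlLexer(InputStream(sql.upper()))
    parser = PlSqlParser(CommonTokenStream(lexer))
    tree = parser.sql_script()
    collector = TableNameCollector()
    ParseTreeWalker().walk(collector, tree)
    return collector.tables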
| 36.195028
| 133
| 0.763739
| 21,720
| 177,609
| 5.998481
| 0.047238
| 0.050151
| 0.083585
| 0.150452
| 0.910528
| 0.903275
| 0.903037
| 0.787062
| 0.780546
| 0.426927
| 0
| 0.000083
| 0.181021
| 177,609
| 4,906
| 134
| 36.202405
| 0.895619
| 0.374322
| 0
| 0.498625
| 1
| 0
| 0.000009
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.498625
| false
| 0.500458
| 0.001375
| 0
| 0.500458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
dcdf013084e1129b29cf14d1b44402d09e9c8eb7
| 15,260
|
py
|
Python
|
accelbyte_py_sdk/api/group/wrappers/_group_member.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/group/wrappers/_group_member.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/group/wrappers/_group_member.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import HeaderStr
from ....core import get_namespace as get_services_namespace
from ....core import run_request
from ....core import run_request_async
from ....core import same_doc_as
from ..models import ModelsGetGroupMemberListResponseV1
from ..models import ModelsGetUserGroupInformationResponseV1
from ..models import ModelsJoinGroupResponseV1
from ..models import ModelsKickGroupMemberResponseV1
from ..models import ModelsLeaveGroupResponseV1
from ..models import ModelsMemberRequestGroupResponseV1
from ..models import ModelsUserInvitationResponseV1
from ..models import ResponseErrorResponse
from ..operations.group_member import AcceptGroupInvitationPublicV1
from ..operations.group_member import AcceptGroupJoinRequestPublicV1
from ..operations.group_member import CancelGroupJoinRequestV1
from ..operations.group_member import GetGroupMembersListAdminV1
from ..operations.group_member import GetGroupMembersListPublicV1
from ..operations.group_member import GetUserGroupInformationPublicV1
from ..operations.group_member import InviteGroupPublicV1
from ..operations.group_member import JoinGroupV1
from ..operations.group_member import KickGroupMemberPublicV1
from ..operations.group_member import LeaveGroupPublicV1
from ..operations.group_member import RejectGroupInvitationPublicV1
from ..operations.group_member import RejectGroupJoinRequestPublicV1
@same_doc_as(AcceptGroupInvitationPublicV1)
def accept_group_invitation_public_v1(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AcceptGroupInvitationPublicV1.create(
group_id=group_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(AcceptGroupInvitationPublicV1)
async def accept_group_invitation_public_v1_async(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AcceptGroupInvitationPublicV1.create(
group_id=group_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(AcceptGroupJoinRequestPublicV1)
def accept_group_join_request_public_v1(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AcceptGroupJoinRequestPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(AcceptGroupJoinRequestPublicV1)
async def accept_group_join_request_public_v1_async(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AcceptGroupJoinRequestPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(CancelGroupJoinRequestV1)
def cancel_group_join_request_v1(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = CancelGroupJoinRequestV1.create(
group_id=group_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(CancelGroupJoinRequestV1)
async def cancel_group_join_request_v1_async(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = CancelGroupJoinRequestV1.create(
group_id=group_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroupMembersListAdminV1)
def get_group_members_list_admin_v1(group_id: str, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = GetGroupMembersListAdminV1.create(
group_id=group_id,
limit=limit,
offset=offset,
order=order,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroupMembersListAdminV1)
async def get_group_members_list_admin_v1_async(group_id: str, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = GetGroupMembersListAdminV1.create(
group_id=group_id,
limit=limit,
offset=offset,
order=order,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroupMembersListPublicV1)
def get_group_members_list_public_v1(group_id: str, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = GetGroupMembersListPublicV1.create(
group_id=group_id,
limit=limit,
offset=offset,
order=order,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroupMembersListPublicV1)
async def get_group_members_list_public_v1_async(group_id: str, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = GetGroupMembersListPublicV1.create(
group_id=group_id,
limit=limit,
offset=offset,
order=order,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetUserGroupInformationPublicV1)
def get_user_group_information_public_v1(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = GetUserGroupInformationPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetUserGroupInformationPublicV1)
async def get_user_group_information_public_v1_async(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = GetUserGroupInformationPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(InviteGroupPublicV1)
def invite_group_public_v1(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = InviteGroupPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(InviteGroupPublicV1)
async def invite_group_public_v1_async(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = InviteGroupPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(JoinGroupV1)
def join_group_v1(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = JoinGroupV1.create(
group_id=group_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(JoinGroupV1)
async def join_group_v1_async(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = JoinGroupV1.create(
group_id=group_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(KickGroupMemberPublicV1)
def kick_group_member_public_v1(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = KickGroupMemberPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(KickGroupMemberPublicV1)
async def kick_group_member_public_v1_async(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = KickGroupMemberPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(LeaveGroupPublicV1)
def leave_group_public_v1(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = LeaveGroupPublicV1.create(
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(LeaveGroupPublicV1)
async def leave_group_public_v1_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = LeaveGroupPublicV1.create(
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(RejectGroupInvitationPublicV1)
def reject_group_invitation_public_v1(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = RejectGroupInvitationPublicV1.create(
group_id=group_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(RejectGroupInvitationPublicV1)
async def reject_group_invitation_public_v1_async(group_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = RejectGroupInvitationPublicV1.create(
group_id=group_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(RejectGroupJoinRequestPublicV1)
def reject_group_join_request_public_v1(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = RejectGroupJoinRequestPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(RejectGroupJoinRequestPublicV1)
async def reject_group_join_request_public_v1_async(user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = RejectGroupJoinRequestPublicV1.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(request, additional_headers=x_additional_headers, **kwargs)
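# --- Illustrative usage sketch (not generated code) ---
# Every wrapper above follows the (result, error) pair convention its body
# relies on: get_services_namespace() and run_request(...) both yield a
# value/error tuple instead of raising. A minimal caller, assuming the SDK
# client and credentials were initialized elsewhere:
def try_join_group(group_id: str):
    result, error = join_group_v1(group_id=group_id)
    if error:
        # Errors are returned, not raised; handle them explicitly.
        return None
    return result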
| 40.693333
| 250
| 0.734404
| 1,775
| 15,260
| 6.057465
| 0.072113
| 0.113839
| 0.080357
| 0.053571
| 0.861235
| 0.815662
| 0.803385
| 0.78497
| 0.776228
| 0.776228
| 0
| 0.007583
| 0.17903
| 15,260
| 374
| 251
| 40.802139
| 0.850655
| 0.050197
| 0
| 0.753333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.086667
| 0
| 0.286667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0d19d85d6471b0491005066660990b9ba68be74f
| 2,309
|
py
|
Python
|
test_ImageProcessing/test_ImgFunctions.py
|
mdholbrook/bme590final
|
e226114009f2d43052fb82a9850d3b82d09e0ad7
|
[
"MIT"
] | 1
|
2018-11-27T21:09:53.000Z
|
2018-11-27T21:09:53.000Z
|
test_ImageProcessing/test_ImgFunctions.py
|
mdholbrook/bme590final
|
e226114009f2d43052fb82a9850d3b82d09e0ad7
|
[
"MIT"
] | 41
|
2018-11-27T21:11:09.000Z
|
2018-12-15T01:33:05.000Z
|
test_ImageProcessing/test_ImgFunctions.py
|
mdholbrook/bme590final
|
e226114009f2d43052fb82a9850d3b82d09e0ad7
|
[
"MIT"
] | null | null | null |
import pytest
from ImageProcessing.ImgFunctions import *
from skimage import io
@pytest.mark.parametrize("filename,expected", [
("TestImages/Lenna.png", 2),
("TestImages/circlesBrightDark.png", 2),
("TestImages/yellowlily.jpg", 2),
("TestImages/tire.tif", 2),
("TestImages/coins.png", 2),
("TestImages/foosball.jpg", 2),
])
def test_histogram_eq(filename, expected):
img = io.imread(filename)
test = histogram_eq(img)
assert test.shape[0] == img.shape[0]
assert test.shape[1] == img.shape[1]
@pytest.mark.parametrize("filename,expected", [
("TestImages/Lenna.png", 2),
("TestImages/circlesBrightDark.png", 2),
("TestImages/yellowlily.jpg", 2),
("TestImages/tire.tif", 2),
("TestImages/coins.png", 2),
])
def test_contrast_stretching(filename, expected):
img = io.imread(filename)
test = contrast_stretching(img)
assert test.shape[0] == img.shape[0]
assert test.shape[1] == img.shape[1]
@pytest.mark.parametrize("filename,expected", [
("TestImages/Lenna.png", 2),
("TestImages/circlesBrightDark.png", 2),
("TestImages/yellowlily.jpg", 2),
("TestImages/tire.tif", 2),
("TestImages/coins.png", 2),
])
def test_log_compression(filename, expected):
img = io.imread(filename)
test = log_compression(img)
assert test.shape[0] == img.shape[0]
assert test.shape[1] == img.shape[1]
@pytest.mark.parametrize("filename,expected", [
("TestImages/Lenna.png", 2),
("TestImages/circlesBrightDark.png", 2),
("TestImages/yellowlily.jpg", 2),
("TestImages/tire.tif", 2),
("TestImages/coins.png", 2),
("TestImages/foosball.jpg", 2),
])
def test_reverse_video(filename, expected):
img = io.imread(filename)
test = reverse_video(img)
assert test.shape[0] == img.shape[0]
assert test.shape[1] == img.shape[1]
@pytest.mark.parametrize("filename,expected", [
("TestImages/Lenna.png", 2),
("TestImages/circlesBrightDark.png", 2),
("TestImages/yellowlily.jpg", 2),
("TestImages/tire.tif", 2),
("TestImages/coins.png", 2),
("TestImages/foosball.jpg", 2),
])
def test_gamma_correction(filename, expected):
img = io.imread(filename)
test = gamma_correction(img)
assert test.shape[0] == img.shape[0]
assert test.shape[1] == img.shape[1]
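# --- Illustrative sketch (ImageProcessing.ImgFunctions is not shown) ---
# The assertions above pin down one shared contract: every function returns
# an array with the input's height and width. A minimal shape-preserving
# implementation of one of them, assuming reverse_video means classic
# intensity inversion:
import numpy as np

def reverse_video_sketch(img):
    """Invert intensities without changing the array shape."""
    img = np.asarray(img)
    if np.issubdtype(img.dtype, np.integer):
        return np.iinfo(img.dtype).max - img
    return 1.0 - img  # float images assumed normalized to [0, 1]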
| 29.602564
| 49
| 0.660026
| 287
| 2,309
| 5.25784
| 0.139373
| 0.167661
| 0.12061
| 0.09609
| 0.86216
| 0.86216
| 0.86216
| 0.732936
| 0.732936
| 0.732936
| 0
| 0.024742
| 0.159809
| 2,309
| 77
| 50
| 29.987013
| 0.753093
| 0
| 0
| 0.80303
| 0
| 0
| 0.317887
| 0.153313
| 0
| 0
| 0
| 0
| 0.151515
| 1
| 0.075758
| false
| 0
| 0.045455
| 0
| 0.121212
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d39f6aa538f1ebd04b80f9f2d9d0cdedd68ea96
| 130
|
py
|
Python
|
src/prefect/tasks/control_flow/__init__.py
|
louisditzel/prefect
|
b1a02fee623b965e756a38aa09059db780ab67eb
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-10T14:32:32.000Z
|
2020-05-10T14:32:32.000Z
|
src/prefect/tasks/control_flow/__init__.py
|
louisditzel/prefect
|
b1a02fee623b965e756a38aa09059db780ab67eb
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/prefect/tasks/control_flow/__init__.py
|
louisditzel/prefect
|
b1a02fee623b965e756a38aa09059db780ab67eb
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from prefect.tasks.control_flow.conditional import ifelse, switch, merge
from prefect.tasks.control_flow.filter import FilterTask
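# --- Illustrative sketch (legacy, pre-2.0 Prefect API) ---
# ifelse() gates two branches on a boolean task and merge() joins whichever
# branch actually ran; the task names below are invented for illustration:
from prefect import Flow, task
from prefect.tasks.control_flow import ifelse, merge

@task
def is_weekend() -> bool:
    return True  # stand-in condition

@task
def relax() -> str:
    return "relaxing"

@task
def work() -> str:
    return "working"

with Flow("conditional-example") as flow:
    branch_a, branch_b = relax(), work()
    ifelse(is_weekend(), branch_a, branch_b)  # exactly one branch runs
    result = merge(branch_a, branch_b)        # first non-skipped result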
| 43.333333
| 72
| 0.861538
| 18
| 130
| 6.111111
| 0.666667
| 0.2
| 0.290909
| 0.418182
| 0.490909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 130
| 2
| 73
| 65
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b4b06042af63a06b62a94601ff314190a04b8f85
| 550
|
py
|
Python
|
temboo/core/Library/Parse/Roles/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Parse/Roles/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Parse/Roles/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Parse.Roles.CreateRole import CreateRole, CreateRoleInputSet, CreateRoleResultSet, CreateRoleChoreographyExecution
from temboo.Library.Parse.Roles.DeleteRole import DeleteRole, DeleteRoleInputSet, DeleteRoleResultSet, DeleteRoleChoreographyExecution
from temboo.Library.Parse.Roles.RetrieveRole import RetrieveRole, RetrieveRoleInputSet, RetrieveRoleResultSet, RetrieveRoleChoreographyExecution
from temboo.Library.Parse.Roles.UpdateRole import UpdateRole, UpdateRoleInputSet, UpdateRoleResultSet, UpdateRoleChoreographyExecution
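# --- Illustrative usage sketch (not part of this package __init__) ---
# Temboo choreographies share a session / input-set / execute pattern; a
# minimal sketch with placeholder credentials, assuming the generated result
# set exposes a get_Response() accessor (Temboo's usual naming convention):
from temboo.core.session import TembooSession

def create_role_example():
    session = TembooSession("ACCOUNT_NAME", "APP_KEY_NAME", "APP_KEY_VALUE")
    choreo = CreateRole(session)
    inputs = choreo.new_input_set()
    inputs.set_credential("MyParseCredential")  # assumed saved credential name
    results = choreo.execute_with_results(inputs)
    return results.get_Response()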
| 110
| 144
| 0.898182
| 44
| 550
| 11.227273
| 0.5
| 0.080972
| 0.137652
| 0.178138
| 0.218623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050909
| 550
| 4
| 145
| 137.5
| 0.94636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b4eed58f05b21e4b5ff66d2bb7d82e3cf8516e5b
| 432
|
py
|
Python
|
temboo/core/Library/Utilities/HTTP/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Utilities/HTTP/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Utilities/HTTP/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Utilities.HTTP.Delete import Delete, DeleteInputSet, DeleteResultSet, DeleteChoreographyExecution
from temboo.Library.Utilities.HTTP.Get import Get, GetInputSet, GetResultSet, GetChoreographyExecution
from temboo.Library.Utilities.HTTP.Post import Post, PostInputSet, PostResultSet, PostChoreographyExecution
from temboo.Library.Utilities.HTTP.Put import Put, PutInputSet, PutResultSet, PutChoreographyExecution
| 86.4
| 117
| 0.87037
| 44
| 432
| 8.545455
| 0.5
| 0.106383
| 0.180851
| 0.276596
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064815
| 432
| 4
| 118
| 108
| 0.930693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b4f43e9c58e9be9a10ebdec33d794d2dde91627d
| 22,348
|
py
|
Python
|
abnorm_tests/tests/test_fields.py
|
tsouvarev/django-abnorm
|
5bd2a788c52d7103a6c2a2444579f07228153642
|
[
"MIT"
] | null | null | null |
abnorm_tests/tests/test_fields.py
|
tsouvarev/django-abnorm
|
5bd2a788c52d7103a6c2a2444579f07228153642
|
[
"MIT"
] | null | null | null |
abnorm_tests/tests/test_fields.py
|
tsouvarev/django-abnorm
|
5bd2a788c52d7103a6c2a2444579f07228153642
|
[
"MIT"
] | null | null | null |
from unittest import skipIf, skipUnless
import django
from django.test import TestCase
from .models import (
TestObj, RelatedTestObj, NullRelatedTestObj, GenericRelatedTestObj,
M2MTestObj, TestParentObj,
)
from abnorm.utils import reload_model_instance
from abnorm.adapters import this_django
class FKRelationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.test_obj2 = TestObj.objects.create()
self.fm0 = RelatedTestObj.objects.create(
value=0, test_obj=self.test_obj)
self.fm1 = RelatedTestObj.objects.create(
value=1, test_obj=self.test_obj)
self.fm2 = RelatedTestObj.objects.create(
value=2, test_obj=self.test_obj)
self.fm3 = RelatedTestObj.objects.create(
value=4, test_obj=self.test_obj)
self.fm4 = RelatedTestObj.objects.create(
value=8, test_obj=self.test_obj2)
self.test_obj = reload_model_instance(self.test_obj)
self.test_obj2 = reload_model_instance(self.test_obj2)
def test_count_field(self):
self.assertEqual(self.test_obj.rto_items_count, 4)
self.assertEqual(self.test_obj2.rto_items_count, 1)
self.fm4.test_obj = self.test_obj
self.fm4.save()
self.test_obj = reload_model_instance(self.test_obj)
self.test_obj2 = reload_model_instance(self.test_obj2)
self.assertEqual(self.test_obj.rto_items_count, 5)
self.assertEqual(self.test_obj2.rto_items_count, 0)
def test_count_field_with_qs_filter_value_change(self):
self.fm5 = RelatedTestObj.objects.create(
value=1, test_obj=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_qsf_count, 2)
self.fm5.value = 666
self.fm5.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_qsf_count, 1)
def test_count_field_with_qs_filter_both_val_and_relation_change(self):
self.fm5 = RelatedTestObj.objects.create(
value=1, test_obj=self.test_obj)
self.fm5.test_obj = self.test_obj2
self.fm5.save()
self.test_obj = reload_model_instance(self.test_obj)
self.test_obj2 = reload_model_instance(self.test_obj2)
self.assertEqual(self.test_obj.rto_items_qsf_count, 1)
self.assertEqual(self.test_obj2.rto_items_qsf_count, 1)
def test_sum_field(self):
self.assertEqual(self.test_obj.rto_item_values_sum, 7)
def test_sum_field_for_relation_with_default_relation_name(self):
RelatedTestObj.objects.create(
value=17, test_obj_wo_related_name=self.test_obj,
test_obj=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(
self.test_obj.rto_with_default_related_name_item_values_sum, 17)
def test_first_item_field(self):
self.assertEqual(self.test_obj.rto_first_item, self.fm0)
def test_first_2_items_field(self):
self.assertEqual(self.test_obj.rto_first_2_items,
[self.fm0, self.fm1])
def test_related_field_has_correct_fk_value(self):
self.assertEqual(
self.test_obj.rto_first_item.test_obj.pk, self.test_obj.pk)
def test_altering_rto_first_item_attr_updates_itself(self):
denormalized_item = self.test_obj.rto_first_item
denormalized_item.value = 666
self.assertEqual(denormalized_item.test_obj, self.test_obj)
denormalized_item.save() # just like if we got it from original model
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_first_item.value, 666)
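# --- Illustrative sketch (abnorm.utils is not shown in this file) ---
# Every test round-trips instances through reload_model_instance so that
# denormalized values written by signal handlers become visible; a minimal
# equivalent, as an assumption about what that helper does:
def _reload_model_instance_sketch(instance):
    """Re-fetch a model instance from the database by primary key."""
    return type(instance).objects.get(pk=instance.pk)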
class GenericRelationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.test_obj2 = TestObj.objects.create()
self.fm0 = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
self.fm1 = GenericRelatedTestObj.objects.create(
value=1, content_object=self.test_obj)
self.fm2 = GenericRelatedTestObj.objects.create(
value=2, content_object=self.test_obj)
self.fm3 = GenericRelatedTestObj.objects.create(
value=4, content_object=self.test_obj)
self.fm4 = GenericRelatedTestObj.objects.create(
value=8, content_object=self.test_obj2)
self.test_obj = reload_model_instance(self.test_obj)
self.test_obj2 = reload_model_instance(self.test_obj2)
def test_count_field(self):
self.assertEqual(self.test_obj.grto_items_count, 4)
def test_sum_field(self):
self.assertEqual(self.test_obj.grto_item_values_sum, 7)
def test_first_item_field(self):
self.assertEqual(self.test_obj.grto_first_item, self.fm0)
def test_first_2_items_field(self):
self.assertEqual(self.test_obj.grto_first_2_items,
[self.fm0, self.fm1])
class M2MRelationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.test_obj2 = TestObj.objects.create()
self.fm0 = M2MTestObj.objects.create(value=0)
self.fm1 = M2MTestObj.objects.create(value=1)
self.fm2 = M2MTestObj.objects.create(value=2)
self.fm3 = M2MTestObj.objects.create(value=4)
self.fm4 = M2MTestObj.objects.create(value=8)
self.fm_unused = M2MTestObj.objects.create(value=16)
self.test_obj.m2m_items.add(self.fm0)
self.test_obj.m2m_items.add(self.fm1)
self.test_obj.m2m_items.add(self.fm2)
self.test_obj.m2m_items.add(self.fm3)
self.test_obj2.m2m_items.add(self.fm4)
self.test_obj = reload_model_instance(self.test_obj)
self.test_obj2 = reload_model_instance(self.test_obj2)
def test_count_field(self):
self.assertEqual(self.test_obj.m2m_items_count, 4)
def test_sum_field(self):
self.assertEqual(self.test_obj.m2m_item_values_sum, 7)
def test_first_item_field(self):
self.assertEqual(self.test_obj.m2m_first_item, self.fm0)
def test_first_2_items_field(self):
self.assertEqual(self.test_obj.m2m_first_2_items,
[self.fm0, self.fm1])
class PostUpdateTestCase(TestCase):
def setUp(self):
self.test_grand_parent = TestParentObj.objects.create()
self.test_parent = TestParentObj.objects.create(
parent=self.test_grand_parent)
self.test_obj = TestObj.objects.create(parent=self.test_parent)
self.fm0 = M2MTestObj.objects.create(value=0)
self.fm1 = M2MTestObj.objects.create(value=1)
self.test_obj.m2m_items.add(self.fm0)
self.test_obj.m2m_items.add(self.fm1)
self.test_grand_parent = reload_model_instance(self.test_grand_parent)
self.test_parent = reload_model_instance(self.test_parent)
self.test_obj = reload_model_instance(self.test_obj)
def test_all_test_objs_field(self):
# post_save fired
self.assertEqual(self.test_parent.all_test_objs, [self.test_obj])
# m2m fired
self.assertEqual(self.test_obj.m2m_first_2_items, [self.fm0, self.fm1])
# post_update fired
self.assertEqual(self.test_parent.all_test_objs[0].m2m_first_2_items,
self.test_obj.m2m_first_2_items)
# post_update fired up above
self.assertEqual(self.test_grand_parent.all_children,
[self.test_parent])
# Got abnormed data too
self.assertEqual(
(self.test_grand_parent.all_children[0]
.all_test_objs[0].m2m_first_2_items),
self.test_obj.m2m_first_2_items)
def test_m2m_relation_updates_denorm_on_save(self):
# prove initial data
self.assertEqual(
(self.test_grand_parent
.all_children[0]
.all_test_objs[0]
.m2m_first_2_items[0]
.value),
0)
# trigger update of m2m object
self.fm0.value = 999
self.fm0.save()
# prove all data updated
self.test_grand_parent = reload_model_instance(self.test_grand_parent)
self.assertEqual(
(self.test_grand_parent
.all_children[0]
.all_test_objs[0]
.m2m_first_2_items[0].value),
999)
class GRPostUpdateTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.grto = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj
)
self.m2m = M2MTestObj.objects.create(value=0)
self.grto.m2m_items.add(self.m2m)
self.grto = reload_model_instance(self.grto)
self.test_obj = reload_model_instance(self.test_obj)
def test_rto_post_update(self):
# proves old value is 0
self.assertEqual(
self.test_obj.grto_first_item.m2m_first_item.value, 0)
# trigger denormalized data update
self.m2m.value = 999
self.m2m.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(
self.test_obj.grto_first_item.m2m_first_item.value, 999)
class RTOCountDenormalizationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.another_test_obj = TestObj.objects.create()
def test_zero_for_new_items(self):
self.assertEqual(self.test_obj.rto_items_count, 0)
def test_updated_after_replacing(self):
RelatedTestObj.objects.create(value=0, test_obj=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 1)
self.test_obj.rto_items.create(value=1)
self.test_obj.rto_items.create(value=2)
self.test_obj.rto_items.create(value=3)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 4)
@skipIf(django.VERSION >= (1, 9), 'django<1.9 feature')
def test_updated_after_adding_pre_19(self):
self.test_obj.rto_items.create(value=0)
self.test_obj.rto_items.create(value=0)
self.test_obj.rto_items.add(
RelatedTestObj(value=0)
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 3)
@skipUnless(django.VERSION >= (1, 9), 'django1.9+ feature')
def test_updated_after_adding_since_19(self):
self.test_obj.rto_items.create(value=0)
self.test_obj.rto_items.create(value=0)
self.test_obj.rto_items.add(
RelatedTestObj(value=0), bulk=False
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 3)
    # No test_updated_after_removing: the related manager for a FK with
    # null=False has no 'remove' method (see the sketch just below).
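    # A sketch for reference (assumed models, not part of the original
    # suite): Django only exposes remove()/clear() on the reverse manager
    # when the FK is nullable, because removal works by setting the FK
    # column to NULL:
    #
    #     test_obj = models.ForeignKey(TestObj, null=True)   # .remove() exists
    #     test_obj = models.ForeignKey(TestObj, null=False)  # no .remove()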
def test_updated_after_clearing(self):
self.test_obj.rto_items.create(value=0)
self.test_obj.rto_items.create(value=0)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 2)
this_django.m2m_set(self.test_obj, 'rto_items', [])
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 2)
def test_updated_after_adding_and_resaving(self):
self.test_obj.rto_items.create(value=0)
self.test_obj.rto_items.create(value=0)
self.test_obj.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 2)
def test_updated_after_changing_relation(self):
rto = RelatedTestObj.objects.create(value=0, test_obj=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 1)
rto.test_obj = self.another_test_obj
rto.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.rto_items_count, 0)
self.another_test_obj = reload_model_instance(self.another_test_obj)
self.assertEqual(self.another_test_obj.rto_items_count, 1)
class NullRTOCountDenormalizationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.another_test_obj = TestObj.objects.create()
def test_zero_for_new_items(self):
self.assertEqual(self.test_obj.nrto_items_count, 0)
def test_updated_after_replacing(self):
NullRelatedTestObj.objects.create(value=0, test_obj=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 1)
        self.test_obj.nrto_items.create(value=0)
        self.test_obj.nrto_items.create(value=0)
        self.test_obj.nrto_items.create(value=0)
        self.test_obj = reload_model_instance(self.test_obj)
        self.assertEqual(self.test_obj.nrto_items_count, 4)
def test_updated_after_adding(self):
this_django.m2m_set(
self.test_obj,
'nrto_items',
[
NullRelatedTestObj.objects.create(
value=0, test_obj=self.test_obj),
NullRelatedTestObj.objects.create(
value=0, test_obj=self.test_obj)
]
)
self.test_obj.nrto_items.add(
NullRelatedTestObj.objects.create(value=0, test_obj=self.test_obj))
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 3)
def test_updated_after_removing(self):
item1 = NullRelatedTestObj.objects.create(
value=0, test_obj=self.test_obj)
item2 = NullRelatedTestObj.objects.create(
value=0, test_obj=self.test_obj)
this_django.m2m_set(self.test_obj, 'nrto_items', [item1, item2])
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 2)
self.test_obj.nrto_items.remove(item1)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 1)
def test_updated_after_clearing(self):
self.test_obj.nrto_items.create(value=0)
self.test_obj.nrto_items.create(value=0)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 2)
this_django.m2m_set(self.test_obj, 'nrto_items', [])
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 0)
def test_updated_after_adding_and_resaving(self):
self.test_obj.nrto_items.create(value=0)
self.test_obj.nrto_items.create(value=0)
self.test_obj.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 2)
def test_updated_after_changing_relation(self):
nrto = NullRelatedTestObj.objects.create(
value=0, test_obj=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 1)
nrto.test_obj = self.another_test_obj
nrto.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.nrto_items_count, 0)
self.another_test_obj = reload_model_instance(self.another_test_obj)
self.assertEqual(self.another_test_obj.nrto_items_count, 1)
class GRTOCountDenormalizationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.another_test_obj = TestObj.objects.create()
def test_zero_for_new_items(self):
self.assertEqual(self.test_obj.grto_items_count, 0)
def test_updated_after_replacing(self):
this_django.m2m_set(
self.test_obj,
'grto_items',
[
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj),
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj),
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj),
]
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 3)
def test_updated_after_adding(self):
this_django.m2m_set(
self.test_obj,
'grto_items',
[
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj),
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
]
)
self.test_obj.grto_items.add(
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 3)
def test_updated_after_removing(self):
grto1 = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
grto2 = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
this_django.m2m_set(self.test_obj, 'grto_items', [grto1, grto2])
self.test_obj.grto_items.remove(grto1)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 1)
def test_updated_after_clearing(self):
this_django.m2m_set(
self.test_obj,
'grto_items',
[
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj),
GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
]
)
this_django.m2m_set(self.test_obj, 'grto_items', [])
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 0)
def test_updated_after_adding_and_resaving(self):
grto1 = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
grto2 = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
this_django.m2m_set(self.test_obj, 'grto_items', [grto1, grto2])
self.test_obj.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 2)
def test_updated_after_changing_relation(self):
grto = GenericRelatedTestObj.objects.create(
value=0, content_object=self.test_obj)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 1)
grto.content_object = self.another_test_obj
grto.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.grto_items_count, 0)
self.another_test_obj = reload_model_instance(self.another_test_obj)
self.assertEqual(self.another_test_obj.grto_items_count, 1)
class M2MCountDenormalizationTestCase(TestCase):
def setUp(self):
self.test_obj = TestObj.objects.create()
self.another_test_obj = TestObj.objects.create()
def test_zero_for_new_items(self):
self.assertEqual(self.test_obj.m2m_items_count, 0)
def test_updated_after_replacing(self):
this_django.m2m_set(
self.test_obj,
'm2m_items',
[
M2MTestObj.objects.create(value=0),
M2MTestObj.objects.create(value=0),
M2MTestObj.objects.create(value=0),
]
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.m2m_items_count, 3)
def test_updated_after_adding(self):
this_django.m2m_set(
self.test_obj,
'm2m_items',
[
M2MTestObj.objects.create(value=0),
M2MTestObj.objects.create(value=0),
]
)
self.test_obj.m2m_items.add(
M2MTestObj.objects.create(value=0),
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.m2m_items_count, 3)
def test_updated_after_removing(self):
m2m1 = M2MTestObj.objects.create(value=0)
m2m2 = M2MTestObj.objects.create(value=0)
this_django.m2m_set(
self.test_obj,
'm2m_items',
[m2m1, m2m2]
)
self.test_obj.m2m_items.remove(m2m1)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.m2m_items_count, 1)
def test_updated_after_clearing(self):
this_django.m2m_set(
self.test_obj,
'm2m_items',
[
M2MTestObj.objects.create(value=0),
M2MTestObj.objects.create(value=0),
]
)
this_django.m2m_set(
self.test_obj,
'm2m_items',
[]
)
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.m2m_items_count, 0)
def test_updated_after_adding_and_resaving(self):
m2m1 = M2MTestObj.objects.create(value=0)
m2m2 = M2MTestObj.objects.create(value=0)
this_django.m2m_set(
self.test_obj,
'm2m_items',
[m2m1, m2m2]
)
self.test_obj.save()
self.test_obj = reload_model_instance(self.test_obj)
self.assertEqual(self.test_obj.m2m_items_count, 2)
class DropCascadeTestCase(TestCase):
def setUp(self):
self.parent = TestParentObj.objects.create()
self.child = TestParentObj.objects.create(parent=self.parent)
def test_doesnt_fail_on_broken_refs(self):
# see DenormalizedFieldMixin.update_value_by try/except block comments
self.parent.delete()
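# The test above leans on the try/except described in
# DenormalizedFieldMixin.update_value_by. A plausible shape of that guard,
# assumed here since the mixin's real code lives elsewhere in the package:
#
#     def update_value_by(self, instance, *args, **kwargs):
#         try:
#             instance.save()
#         except ObjectDoesNotExist:
#             # A cascading delete can remove a referenced row before the
#             # denormalized field is refreshed; skip instead of failing.
#             pass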
| 36.938843 | 79 | 0.672499 | 2,939 | 22,348 | 4.790745 | 0.059884 | 0.142188 | 0.192188 | 0.107813 | 0.846449 | 0.821591 | 0.791051 | 0.768892 | 0.753977 | 0.728906 | 0 | 0.021569 | 0.234473 | 22,348 | 604 | 80 | 37 | 0.801438 | 0.019062 | 0 | 0.603004 | 0 | 0 | 0.008627 | 0 | 0 | 0 | 0 | 0 | 0.150215 | 1 | 0.124464 | false | 0 | 0.012876 | 0 | 0.158798 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37244fac31f7fe383cdb55e9c908cdd66ac71479 | 4,235 | py | Python | authors/apps/articles/tests/test_like_dislike.py | andela/ah-django-unchained | a4e5f6cd11fdc0b9422020693ac1200b849cf0f3 | ["BSD-3-Clause"] | null | null | null | authors/apps/articles/tests/test_like_dislike.py | andela/ah-django-unchained | a4e5f6cd11fdc0b9422020693ac1200b849cf0f3 | ["BSD-3-Clause"] | 26 | 2019-01-07T14:22:05.000Z | 2019-02-28T17:11:48.000Z | authors/apps/articles/tests/test_like_dislike.py | andela/ah-django-unchained | a4e5f6cd11fdc0b9422020693ac1200b849cf0f3 | ["BSD-3-Clause"] | 3 | 2019-09-19T22:16:09.000Z | 2019-10-16T21:16:16.000Z |
from django.urls import reverse
from rest_framework import status
from authors.apps.articles.tests.like_dislike_base_test import LikeDislike
class LikeDislikeTest(LikeDislike):
"""Test like or dislike articles."""
def test_like_article(self):
"""Test like an article."""
response = self.like_article()
self.assertEqual(response.data['likes_count'], 1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_dislike_article(self):
"""Test dislike an article."""
response = self.dislike_article()
self.assertEqual(response.data['dislikes_count'], 1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_like_already_liked_article(self):
"""Test like already liked article."""
slug = self.create_new_article()
token = self.signup_user_one(self.user_signup_data2)
self.client.put(
reverse('articles:likes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
response = self.client.put(
reverse('articles:likes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
self.assertEqual(response.data['likes_count'], 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_like_an_article_that_is_disliked(self):
"""Test like an article that has already been disliked."""
slug = self.create_new_article()
token = self.signup_user_one(self.user_signup_data2)
self.client.put(
reverse('articles:dislikes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
response = self.client.put(
reverse('articles:likes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
self.assertEqual(response.data['likes_count'], 1)
self.assertEqual(response.data['dislikes_count'], 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_dislike_article_already_liked(self):
"""Test dislike article that has already been liked."""
slug = self.create_new_article()
token = self.signup_user_one(self.user_signup_data2)
self.client.put(
reverse('articles:likes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
response = self.client.put(
reverse('articles:dislikes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
self.assertEqual(response.data['likes_count'], 0)
self.assertEqual(response.data['dislikes_count'], 1)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_dislike_article_already_disliked(self):
"""Test dislike article that has already been disliked."""
slug = self.create_new_article()
token = self.signup_user_one(self.user_signup_data2)
self.client.put(
reverse('articles:dislikes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
response = self.client.put(
reverse('articles:dislikes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
self.assertEqual(response.data['likes_count'], 0)
self.assertEqual(response.data['dislikes_count'], 0)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_rejects_like_unexisting_article(self):
"""Test rejects liking unexisting article."""
slug = "myslug"
token = self.signup_user_one(self.user_signup_data2)
response = self.client.put(
reverse('articles:likes', kwargs={'slug': slug}),
format='json',
HTTP_AUTHORIZATION='token {}'.format(token))
self.assertEqual(response.data['detail'],
'An article with this slug does not exist')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
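# The reverse() calls above assume URL patterns along these lines (a
# sketch; the view names are hypothetical and the project's actual urls.py
# is not part of this file):
#
#     urlpatterns = [
#         path('<slug>/like/', LikeArticleView.as_view(), name='likes'),
#         path('<slug>/dislike/', DislikeArticleView.as_view(), name='dislikes'),
#     ]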
| 44.114583 | 74 | 0.642503 | 483 | 4,235 | 5.426501 | 0.142857 | 0.097291 | 0.14918 | 0.103014 | 0.84319 | 0.801984 | 0.801984 | 0.785578 | 0.764594 | 0.748951 | 0 | 0.010743 | 0.230697 | 4,235 | 95 | 75 | 44.578947 | 0.793738 | 0.072255 | 0 | 0.779221 | 0 | 0 | 0.114484 | 0 | 0 | 0 | 0 | 0 | 0.220779 | 1 | 0.090909 | false | 0 | 0.038961 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2eb7ab446d415077cce46fa1087333dd306d40d8 | 3,912 | py | Python | src/tests/test_utils_evtable.py | reddcoin-project/ReddConnect | 5c212683de6b80b81fd15ed05239c3a1b46c3afd | ["BSD-3-Clause"] | 5 | 2015-01-30T08:47:59.000Z | 2022-01-22T19:27:03.000Z | src/tests/test_utils_evtable.py | reddcoin-project/ReddConnect | 5c212683de6b80b81fd15ed05239c3a1b46c3afd | ["BSD-3-Clause"] | 2 | 2017-12-28T21:36:48.000Z | 2017-12-28T21:36:57.000Z | src/tests/test_utils_evtable.py | reddcoin-project/ReddConnect | 5c212683de6b80b81fd15ed05239c3a1b46c3afd | ["BSD-3-Clause"] | 1 | 2019-01-05T15:51:37.000Z | 2019-01-05T15:51:37.000Z |
import unittest
class TestMakeIter(unittest.TestCase):
def test_make_iter(self):
# self.assertEqual(expected, make_iter(obj))
assert True # TODO: implement your test here
class TestWrap(unittest.TestCase):
def test_wrap(self):
# self.assertEqual(expected, wrap(text, width, **kwargs))
assert True # TODO: implement your test here
class TestFill(unittest.TestCase):
def test_fill(self):
# self.assertEqual(expected, fill(text, width, **kwargs))
assert True # TODO: implement your test here
class TestCell(unittest.TestCase):
def test___init__(self):
# cell = Cell(data, **kwargs)
assert True # TODO: implement your test here
def test___str__(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.__str__())
assert True # TODO: implement your test here
def test___unicode__(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.__unicode__())
assert True # TODO: implement your test here
def test_get(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.get())
assert True # TODO: implement your test here
def test_get_height(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.get_height())
assert True # TODO: implement your test here
def test_get_min_height(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.get_min_height())
assert True # TODO: implement your test here
def test_get_min_width(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.get_min_width())
assert True # TODO: implement your test here
def test_get_width(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.get_width())
assert True # TODO: implement your test here
def test_reformat(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.reformat(**kwargs))
assert True # TODO: implement your test here
def test_replace_data(self):
# cell = Cell(data, **kwargs)
# self.assertEqual(expected, cell.replace_data(data, **kwargs))
assert True # TODO: implement your test here
class TestEvTable(unittest.TestCase):
def test___init__(self):
# ev_table = EvTable(*args, **kwargs)
assert True # TODO: implement your test here
def test___str__(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.__str__())
assert True # TODO: implement your test here
def test___unicode__(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.__unicode__())
assert True # TODO: implement your test here
def test_add_column(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.add_column(*args, **kwargs))
assert True # TODO: implement your test here
def test_add_header(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.add_header(*args, **kwargs))
assert True # TODO: implement your test here
def test_add_row(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.add_row(*args, **kwargs))
assert True # TODO: implement your test here
def test_get(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.get())
assert True # TODO: implement your test here
def test_reformat(self):
# ev_table = EvTable(*args, **kwargs)
# self.assertEqual(expected, ev_table.reformat(**kwargs))
assert True # TODO: implement your test here
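# One stub filled in, as a sketch of the intended pattern. It assumes
# EvTable renders added rows into its string form, which the real module
# may expose differently:
#
#     class TestEvTableRendering(unittest.TestCase):
#         def test_add_row_appears_in_output(self):
#             table = EvTable("Name", "Value")
#             table.add_row("alpha", "1")
#             self.assertIn("alpha", str(table))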
if __name__ == '__main__':
unittest.main()
| 35.563636 | 74 | 0.642382 | 476 | 3,912 | 5.056723 | 0.102941 | 0.061072 | 0.122144 | 0.200665 | 0.864977 | 0.861654 | 0.830079 | 0.830079 | 0.813461 | 0.690902 | 0 | 0 | 0.244376 | 3,912 | 109 | 75 | 35.889908 | 0.814276 | 0.561094 | 0 | 0.62 | 0 | 0 | 0.004825 | 0 | 0 | 0 | 0 | 0.009174 | 0.42 | 1 | 0.42 | false | 0 | 0.02 | 0 | 0.54 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
2c024b5b03795f99bea617f6cb1f7d7a5c1df3a6 | 20,799 | py | Python | sdk/python/pulumi_azure/logicapps/integration_account_partner.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | ["ECL-2.0", "Apache-2.0"] | 109 | 2018-06-18T00:19:44.000Z | 2022-02-20T05:32:57.000Z | sdk/python/pulumi_azure/logicapps/integration_account_partner.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | ["ECL-2.0", "Apache-2.0"] | 663 | 2018-06-18T21:08:46.000Z | 2022-03-31T20:10:11.000Z | sdk/python/pulumi_azure/logicapps/integration_account_partner.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | ["ECL-2.0", "Apache-2.0"] | 41 | 2018-07-19T22:37:38.000Z | 2022-03-14T10:56:26.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['IntegrationAccountPartnerArgs', 'IntegrationAccountPartner']
@pulumi.input_type
class IntegrationAccountPartnerArgs:
def __init__(__self__, *,
business_identities: pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]],
integration_account_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
metadata: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a IntegrationAccountPartner resource.
:param pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]] business_identities: A `business_identity` block as documented below.
:param pulumi.Input[str] integration_account_name: The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] metadata: A JSON mapping of any Metadata for this Logic App Integration Account Partner.
:param pulumi.Input[str] name: The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
"""
pulumi.set(__self__, "business_identities", business_identities)
pulumi.set(__self__, "integration_account_name", integration_account_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="businessIdentities")
def business_identities(self) -> pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]]:
"""
A `business_identity` block as documented below.
"""
return pulumi.get(self, "business_identities")
@business_identities.setter
def business_identities(self, value: pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]]):
pulumi.set(self, "business_identities", value)
@property
@pulumi.getter(name="integrationAccountName")
def integration_account_name(self) -> pulumi.Input[str]:
"""
The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "integration_account_name")
@integration_account_name.setter
def integration_account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "integration_account_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input[str]]:
"""
A JSON mapping of any Metadata for this Logic App Integration Account Partner.
"""
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _IntegrationAccountPartnerState:
def __init__(__self__, *,
business_identities: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]]] = None,
integration_account_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering IntegrationAccountPartner resources.
:param pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]] business_identities: A `business_identity` block as documented below.
:param pulumi.Input[str] integration_account_name: The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] metadata: A JSON mapping of any Metadata for this Logic App Integration Account Partner.
:param pulumi.Input[str] name: The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
"""
if business_identities is not None:
pulumi.set(__self__, "business_identities", business_identities)
if integration_account_name is not None:
pulumi.set(__self__, "integration_account_name", integration_account_name)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
@property
@pulumi.getter(name="businessIdentities")
def business_identities(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]]]:
"""
A `business_identity` block as documented below.
"""
return pulumi.get(self, "business_identities")
@business_identities.setter
def business_identities(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationAccountPartnerBusinessIdentityArgs']]]]):
pulumi.set(self, "business_identities", value)
@property
@pulumi.getter(name="integrationAccountName")
def integration_account_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "integration_account_name")
@integration_account_name.setter
def integration_account_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "integration_account_name", value)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input[str]]:
"""
A JSON mapping of any Metadata for this Logic App Integration Account Partner.
"""
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
class IntegrationAccountPartner(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
business_identities: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IntegrationAccountPartnerBusinessIdentityArgs']]]]] = None,
integration_account_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Logic App Integration Account Partner.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_integration_account = azure.logicapps.IntegrationAccount("exampleIntegrationAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
sku_name="Standard")
example_integration_account_partner = azure.logicapps.IntegrationAccountPartner("exampleIntegrationAccountPartner",
resource_group_name=example_resource_group.name,
integration_account_name=example_integration_account.name,
business_identities=[azure.logicapps.IntegrationAccountPartnerBusinessIdentityArgs(
qualifier="ZZ",
value="AA",
)])
```
## Import
Logic App Integration Account Partners can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:logicapps/integrationAccountPartner:IntegrationAccountPartner example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Logic/integrationAccounts/account1/partners/partner1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IntegrationAccountPartnerBusinessIdentityArgs']]]] business_identities: A `business_identity` block as documented below.
:param pulumi.Input[str] integration_account_name: The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] metadata: A JSON mapping of any Metadata for this Logic App Integration Account Partner.
:param pulumi.Input[str] name: The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: IntegrationAccountPartnerArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Logic App Integration Account Partner.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_integration_account = azure.logicapps.IntegrationAccount("exampleIntegrationAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
sku_name="Standard")
example_integration_account_partner = azure.logicapps.IntegrationAccountPartner("exampleIntegrationAccountPartner",
resource_group_name=example_resource_group.name,
integration_account_name=example_integration_account.name,
business_identities=[azure.logicapps.IntegrationAccountPartnerBusinessIdentityArgs(
qualifier="ZZ",
value="AA",
)])
```
## Import
Logic App Integration Account Partners can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:logicapps/integrationAccountPartner:IntegrationAccountPartner example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Logic/integrationAccounts/account1/partners/partner1
```
:param str resource_name: The name of the resource.
:param IntegrationAccountPartnerArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(IntegrationAccountPartnerArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
business_identities: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IntegrationAccountPartnerBusinessIdentityArgs']]]]] = None,
integration_account_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = IntegrationAccountPartnerArgs.__new__(IntegrationAccountPartnerArgs)
if business_identities is None and not opts.urn:
raise TypeError("Missing required property 'business_identities'")
__props__.__dict__["business_identities"] = business_identities
if integration_account_name is None and not opts.urn:
raise TypeError("Missing required property 'integration_account_name'")
__props__.__dict__["integration_account_name"] = integration_account_name
__props__.__dict__["metadata"] = metadata
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
super(IntegrationAccountPartner, __self__).__init__(
'azure:logicapps/integrationAccountPartner:IntegrationAccountPartner',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
business_identities: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IntegrationAccountPartnerBusinessIdentityArgs']]]]] = None,
integration_account_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None) -> 'IntegrationAccountPartner':
"""
Get an existing IntegrationAccountPartner resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IntegrationAccountPartnerBusinessIdentityArgs']]]] business_identities: A `business_identity` block as documented below.
:param pulumi.Input[str] integration_account_name: The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] metadata: A JSON mapping of any Metadata for this Logic App Integration Account Partner.
:param pulumi.Input[str] name: The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _IntegrationAccountPartnerState.__new__(_IntegrationAccountPartnerState)
__props__.__dict__["business_identities"] = business_identities
__props__.__dict__["integration_account_name"] = integration_account_name
__props__.__dict__["metadata"] = metadata
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
return IntegrationAccountPartner(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="businessIdentities")
def business_identities(self) -> pulumi.Output[Sequence['outputs.IntegrationAccountPartnerBusinessIdentity']]:
"""
A `business_identity` block as documented below.
"""
return pulumi.get(self, "business_identities")
@property
@pulumi.getter(name="integrationAccountName")
def integration_account_name(self) -> pulumi.Output[str]:
"""
The name of the Logic App Integration Account. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "integration_account_name")
@property
@pulumi.getter
def metadata(self) -> pulumi.Output[Optional[str]]:
"""
A JSON mapping of any Metadata for this Logic App Integration Account Partner.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for this Logic App Integration Account Partner. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the Resource Group where the Logic App Integration Account Partner should exist. Changing this forces a new Logic App Integration Account Partner to be created.
"""
return pulumi.get(self, "resource_group_name")
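# Example lookup with the static get() method above (a sketch; the resource
# name and id are illustrative, following the import format documented in
# the class docstring):
#
#     partner = IntegrationAccountPartner.get(
#         "imported-partner",
#         id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Logic/integrationAccounts/account1/partners/partner1")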
| 52.390428 | 247 | 0.69912 | 2,306 | 20,799 | 6.097138 | 0.082394 | 0.119061 | 0.05377 | 0.098009 | 0.852703 | 0.840327 | 0.824182 | 0.807468 | 0.799858 | 0.77027 | 0 | 0.004366 | 0.218087 | 20,799 | 396 | 248 | 52.522727 | 0.860173 | 0.413433 | 0 | 0.642512 | 1 | 0 | 0.156026 | 0.085187 | 0 | 0 | 0 | 0 | 0 | 1 | 0.154589 | false | 0.004831 | 0.033816 | 0 | 0.280193 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2c19e091f85515e4975261d392c41b4f89822e56 | 618,442 | py | Python | maqluengine/admin.py | robcbryant/projectanka | 54d713693a9e618bfa96c7d784563bc72fa0a8f8 | ["Apache-2.0"] | null | null | null | maqluengine/admin.py | robcbryant/projectanka | 54d713693a9e618bfa96c7d784563bc72fa0a8f8 | ["Apache-2.0"] | null | null | null | maqluengine/admin.py | robcbryant/projectanka | 54d713693a9e618bfa96c7d784563bc72fa0a8f8 | ["Apache-2.0"] | null | null | null |
#################################################################################################################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################################################################################################################
# NEW ADMIN TO REPLACE OLD
#################################################################################################################################################################################################################################################################################################################################
#
# *This newer Maqlu Admin System was designed and created by Robert Bryant, building on a Django model structure that allows dynamic entity (model) creation by end-users
# *It was created on behalf of a UPENN Museum project directed by Holly Pittman and Steve Tinney
# *Licensing has not yet been determined by the project, so distribution is not allowed until the source is made available on GIT with an associated license file
#
#
#===========================================================================================================================================================
from django.contrib.admin.views.main import ChangeList
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.safestring import mark_safe
from datetime import datetime
from django.utils.http import urlencode
from django.contrib import messages
from django.contrib.auth.models import User
import csv
import sys
from django.db.models import Q, Count, Max
import re
from django.contrib.contenttypes.models import ContentType
from django.shortcuts import render_to_response
from django.template import RequestContext
import urllib2
from django.conf import settings
from django.contrib import admin
from maqluengine.models import *
from .models import FormProject, Form, FormRecordAttributeType, FormRecordAttributeValue
from .models import FormRecordReferenceType, FormRecordReferenceValue, FormType
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
from django.core.urlresolvers import resolve
from django.utils.functional import cached_property
from django.contrib.admin import AdminSite
from django.http import HttpResponse
from django.conf.urls import patterns
from django.views import generic
from django.http import Http404
from time import sleep
from django.contrib.staticfiles.storage import staticfiles_storage
import json
from django.utils.encoding import smart_unicode
from django.shortcuts import redirect
import random
import time
from django.core import serializers
import uuid
import zipfile
import io
import contextlib
###########################################################################################################
# ERROR / INFO LOGGER SETUP
###########################################################################################################
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
hdlr = logging.FileHandler('/var/tmp/django-db-log.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s [%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
###########################################################################################################
###########################################################################################################
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
# CUSTOM ADMIN FUNCTIONS -- used by Admin Views
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
##==========================================================================================================================
## Temporary Experimental Functions ****************************************************************************************
##==========================================================================================================================
def remove_all_form_hierarchy_parent_references(formtype):
for aForm in formtype.form_set.all():
aForm.hierarchy_parent = None
aForm.save()
def CheckPostDataForDeletions(post_data):
#print >>sys.stderr, post_data
if 'delete-form-type' in post_data:
FormType.objects.get(pk=post_data.get('delete-form-type')).delete()
return True
elif 'delete-form-type-group' in post_data:
        #Rather than delete all the form types under the group, we'll loop through them and detach them so that
        # when the form type group is deleted they simply fall back under the project.
currentFormTypeGroup = FormTypeGroup.objects.get(pk=post_data.get('delete-form-type-group'))
for aFormType in currentFormTypeGroup.formtype_set.all():
aFormType.form_type_group = None
aFormType.save()
#Now delete the form type group after its children have been removed
currentFormTypeGroup.delete()
return True
elif 'delete-form' in post_data:
print >>sys.stderr, "Deleting"
formToDelete = Form.objects.get(pk=post_data.get('delete-form'))
formToDelete.delete()
else:
return False
def CheckPostDataForUniqueSessionPOSTSubmit(request):
#Let's do a check to make sure the user didn't hit the refresh button
#If we don't have a session id stored--then we are definitely okay to submit data--it's the first time
if 'sessionID' not in request.session:
request.session['sessionID'] = " ";
return True
    #If we DO have a session ID already stored, then we need to make sure the submitted value is unique.
    #If the two don't match, we're good to go. If the user hits the refresh button, the browser resends the same id
    #already stored in the session, so no form data can be submitted until the 'submit' button is pressed again;
    #refreshing triggers no action because a fresh session id is never sent in the POST.
elif request.session['sessionID'] != request.POST['sesID']:
return True
else:
return False
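#A typical call pattern for the double-submit guard above (a sketch; the
#views that actually consume it appear later in this module). Presumably the
#caller stores the submitted id once the POST has been processed:
#
#    if request.method == 'POST' and CheckPostDataForUniqueSessionPOSTSubmit(request):
#        request.session['sessionID'] = request.POST['sesID'] #consume the token
#        #...process the form data...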
#TODO: I'm using this class as a workaround in the template. It's passed in the admin view context_instance
#so I can have a bit more control over the template looping--I know this isn't Django's preferred way
#but it works for now.
class Counter(object):
count = 0
def set_true(self):
self.count = 1
return ''
def set_false(self):
self.count = 0
return ''
def increment(self):
self.count += 1
return ''
def decrement(self):
self.count -= 1
return ''
def reset(self):
self.count = 0
return''
def double(self):
self.count *= 2
return ''
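#Assumed template usage of the Counter workaround above (a sketch; the
#methods return '' precisely so they can be called inline without output):
#
#    {{ counter.set_false }}
#    {% for item in items %}{{ counter.increment }} ... {% endfor %}
#    rendered {{ counter.count }} items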
##==========================================================================================================================
## Security Functions ****************************************************************************************
##==========================================================================================================================
def SECURITY_log_security_issues(userInfo, viewname, errormessage, META):
#This just prints some information to the server log about any errors/attempted hacks that need to be flagged
FLAG = "!!!! SECURITY FLAG !!!! ===> "
try: FLAG += "User: " + str(userInfo.username) + " - Access Level: " + str(userInfo.permissions.access_level) + " - in View: " + viewname + " -- UserIP: " + str(META.get('HTTP_X_FORWARDED_FOR', '') or META.get('REMOTE_ADDR')) + " - with Message: " + errormessage
except Exception as inst: FLAG += str(inst) + " USER INFO NOT FOUND IN SESSION - in View: " + viewname + " -- UserIP: " + str(META.get('HTTP_X_FORWARDED_FOR', '') or META.get('REMOTE_ADDR')) + " - with Message: " + errormessage
print >>sys.stderr, FLAG
def SECURITY_check_project_access(user, projectID):
#This returns a check to make sure the user's project code and the database item in question's project code
# --match. If they don't, it returns False, if they do it returns True.
#
# *All database EDIT/DELETE 's Must go through this check. Although it can normally be done through a simple Django filter
# * --this redundancy helps trigger warnings or send messages
if user.permissions.project != projectID:
return False
else:
return True
def SECURITY_check_user_permissions(requiredLevel, userLevel):
#There are currently 5 levels of access for a project: 1-5
# Level 5: (Admin) Project-wide permissions. Can freely edit/create/delete any aspect of that specific project
# \-Admins are the only user who can create new users and edit/delete sensitive project data
# Level 4: (Moderator) Can EDIT/CREATE/DELETE FormTypes, Forms, all RTYPEs, and all RVALS
# Level 3: (Power Data-Entry) Can EDIT FormTypes, EDIT/CREATE/DELETE Forms, and all RVALs
# Level 2: (Soft Data-Entry) Can EDIT/CREATE Forms, and all RVALs
# Level 1: (Researcher) Can only VIEW all data for project--normally projects will have some data set to "private"
# \-This gives someone privileged access to browse all PUBLIC and PRIVATE flagged data
# \-without allowing them to change any aspect of the project
#For additional security let's FORCE int() the values--the view requesting the bool should be doing this anyway
requiredLevel = int(requiredLevel)
userLevel = int(userLevel)
userIsIntCheckFlag = False;
requiredIsIntCheckFlag = False;
#Let's make sure they are both values between 1 and 5 for an additional level of security
for level in range(1,6):
if requiredLevel == level: requiredIsIntCheckFlag = True
if userLevel == level: userIsIntCheckFlag = True
#If we for ABSOLUTE sure have 2 ints between 1 and 5, then continue
if userIsIntCheckFlag == True and requiredIsIntCheckFlag == True:
#If the user's permission level is AT LEAST the required permission level, then return TRUE, signally it's okay to give access
if userLevel >= requiredLevel:
return True
#Otherwise the User does NOT have permission to access the View requesting authentication
return False
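#Example gate at the top of an admin view, mirroring how export_project()
#below uses these helpers (a sketch):
#
#    ACCESS_LEVEL = 4  #moderator or above
#    if not SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#        SECURITY_log_security_issues(request.user, 'my_view', 'insufficient access level', request.META)
#        return HttpResponse(status=403)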
##==========================================================================================================================
## Helper Functions ****************************************************************************************
##==========================================================================================================================
def get_api_endpoints():
#------------------------------------------------------------------------------------------------------------------------------------
    #:::This endpoint returns a JSON object mapping names to all admin endpoint URLs, for admin views to pass to their
    # --templates; the javascript functions read it to obtain dynamic URLs without having to hard-code them
#
# This endpoint is not project specific so there is no security by project--only the access level of being an admin user
#------------------------------------------------------------------------------------------------------------------------------------
#Make a key value list of our URLS and add them by name
jsonData = {}
jsonData['get_rtypes'] = reverse('maqlu_admin:get_rtype_list')
jsonData['get_rtypes_rvals'] = reverse('maqlu_admin:get_form_rtypes')
jsonData['get_formtype_forms'] = reverse('maqlu_admin:get_formtype_form_list')
jsonData['create_new_template'] = reverse('maqlu_admin:create_new_form_type_template')
jsonData['load_recycling_bin'] = reverse('maqlu_admin:load_recycling_bin')
jsonData['restore_form_type'] = reverse('maqlu_admin:restore_form_type')
jsonData['recycle_form_type'] = reverse('maqlu_admin:recycle_form_type')
jsonData['delete_form_type'] = reverse('maqlu_admin:delete_form_type')
jsonData['restore_form'] = reverse('maqlu_admin:restore_form')
jsonData['recycle_form'] = reverse('maqlu_admin:recycle_form')
jsonData['delete_form'] = reverse('maqlu_admin:delete_form')
jsonData['restore_frat'] = reverse('maqlu_admin:restore_frat')
jsonData['recycle_frat'] = reverse('maqlu_admin:recycle_frat')
jsonData['delete_frat'] = reverse('maqlu_admin:delete_frat')
jsonData['restore_frrt'] = reverse('maqlu_admin:restore_frrt')
jsonData['recycle_frrt'] = reverse('maqlu_admin:recycle_frrt')
jsonData['delete_frrt'] = reverse('maqlu_admin:delete_frrt')
jsonData['create_form'] = reverse('maqlu_admin:create_new_form')
jsonData['edit_form'] = reverse('maqlu_admin:save_form_changes')
#convert python dict to a json string and send it back as a response
jsonData = json.dumps(jsonData);
return jsonData
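#How views presumably consume the helper above (a sketch; the actual
#templates are not part of this file):
#
#    context['api_endpoints'] = get_api_endpoints()
#    #template side:  var endpoints = JSON.parse('{{ api_endpoints|escapejs }}');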
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
# END OF CUSTOM ADMIN FUNCTIONS
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
# SETUP CUSTOM ADMIN VIEWS
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
class MyAdminSite(AdminSite):
def __init__(self, *args, **kwargs):
super(MyAdminSite, self).__init__(*args, **kwargs)
self.name = 'maqlu_admin'
self.app_name = 'admin'
##==========================================================================================================================
## AJAX ADMIN API ENDPOINTS ************************************************************************************************
##==========================================================================================================================
#-------------------------------------------------------------------------------------------------------
# EXPORTER ENDPOINTS
#=======================================================#
# ACCESS LEVEL : 1 EXPORT_PROJECT()
#=======================================================#
def export_project(self, request, **kwargs):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
        # This endpoint takes a Project pk value and returns a .zip file containing either a flattened set of .csv files or a .json export of ALL form types and their
        # --associated forms/values, etc. It additionally includes a separate CSV
#
# The CSV file has the following columns:
#
# ------------------------------------------------------------------------------------------------------------------------------------------------
# | Form_PK | Form_Name | FormType_PK | FormType_Name | Att_Name_1 | Att_Name_2 | Ref_Att_Name_1_PKs        | Ref_Att_Name_1_Labels | ...
# ------------------------------------------------------------------------------------------------------------------------------------------------
# | 000     | Object A  | 000         | Object Sheet  | Att Value  | Att Value  | Val1_pk, Val2_pk, Val3_pk | Val1, Val2, Val3      | ...
#
# This is essentially a list of forms whose column headings are a direct 1:1 copy of the FRATs. The FRRTs are a little more complex. Each FRRT is given
# --2 columns to provide both a comma-separated list of referenced form names and a comma-separated list of their database PK values. The names
# --might be enough on their own, but I like to have both for completeness' sake.
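# Example request/progress payloads (illustrative values only; the field names are taken from the reads below):
#   POST data: {'id': <FormProject pk>, 'uuid': <client-generated uuid>, 'export_format': 'CSV' or 'JSON'}
#   progress JSON polled by the client: {"percent_done":"42.5","current_formtype":"Object Sheet","is_complete":"False","formtype_total":"3"}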
ERROR_MESSAGE = ""
print >> sys.stderr, request
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
project = FormProject.objects.get(pk=request.POST['id'])
if project.pk == request.user.permissions.project.pk:
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"row_index":"0","row_total":"0","is_complete":"False","row_timer":"0"}')
progressData.save()
keepAliveTimer = time.clock()
all_dicts = []
all_formTypes = FormType.objects.all().filter(project__pk=project.pk)
if all_formTypes:
counter = 0
total_formtypes = len(all_formTypes)
percent_formtype_interval = 100.0 / total_formtypes
print >>sys.stderr, percent_formtype_interval
for formtype in all_formTypes:
print >>sys.stderr, "Currently Loading: " + formtype.form_type_name
formtype_pct_done = counter * percent_formtype_interval
print >>sys.stderr, formtype_pct_done
progressData.jsonString = '{"percent_done":"'+str(formtype_pct_done)+'","current_formtype":"'+str(formtype.form_type_name)+'","is_complete":"False", "formtype_total":"'+str(total_formtypes)+'"}'
progressData.is_complete = False
progressData.save()
counter += 1
all_forms = formtype.form_set.all().filter(flagged_for_deletion=False)
# Load all of our FRATs and FRRTs
all_FRATs = formtype.formrecordattributetype_set.all().filter(flagged_for_deletion=False)
all_FRRTs = formtype.ref_to_parent_formtype.all().filter(flagged_for_deletion=False)
formtype_dict = []
formCounter = 0
total_forms = len(all_forms)
#Guard against a ZeroDivisionError when a form type has no forms
form_pct_interval = percent_formtype_interval / total_forms if total_forms else 0
print >>sys.stderr, form_pct_interval
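#Worked example (numbers assumed): with 4 form types each worth 25% of the progress bar and 50 forms
# --in this form type, each processed form advances the bar by 25.0 / 50 = 0.5%.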
#Start loading all the forms by row now
if all_forms:
for form in all_forms:
#-------------------------------------------------------------------------------------------------
# This block handles the AJAX progress calls before every form is processed
#vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
form_pct_done = formtype_pct_done + (formCounter * form_pct_interval)
form_pct_done = int(form_pct_done * 100) / 100.0
#We re-initialize the progressData instance so it refreshes the values changed by the checkProgress() function
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"percent_done":"'+str(form_pct_done)+'","current_formtype":"'+str(formtype.form_type_name)+'","is_complete":"False", "formtype_total":"'+str(total_formtypes)+'", "current_form":"'+str(form.form_name)+'", "total_forms":"'+str(total_forms)+'"}'
progressData.is_complete = False
#We want to make sure that our timer is checked at 5-second intervals. The AJAX script sets the keep_alive variable to True
# --every 1 second. I've set it to 5 seconds here to account for any delays that might occur over the network.
# --Every 5 seconds, this script resets the keep_alive variable to False; if it is already False, that means the user exited
# --the process on their AJAX end, so we should stop adding to the database and delete what we've already done.
#print >>sys.stderr, str(time.clock()) + " - " + str(keepAliveTimer) + " : " + str(progressData.keep_alive)
if time.clock() - keepAliveTimer > 5:
print >> sys.stderr, str(time.clock() - keepAliveTimer) + " : We are at the 5 second interval! " + str(formCounter)
#restart the keepAlive timer to the current time
keepAliveTimer = time.clock()
#delete the data if the user's AJAX end is unresponsive
if not progressData.keep_alive:
print >> sys.stderr, "We are deleting our progress now--wish us luck!"
#TODO Delete all formtypes FRAT/FRRTs that we just tried making
progressData.delete()
#break from loop
print >>sys.stderr, "Breaking from export function--user/client was unresponsive"
ERROR_MESSAGE = "Breaking from export function--user/client was unresponsive"
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
else:
progressData.keep_alive = False
progressData.save()
formCounter += 1
# End of AJAX Progress processing for this iteration
#-------------------------------------------------------------------------------------------------
new_row = {'Form Name':form.form_name,'Form PK':str(form.pk),'Form Type Parent':formtype.form_type_name,'Form Type Parent PK':str(formtype.pk),}
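#Illustrative resulting row (all values assumed): {'Form Name':'Object A','Form PK':'17','Form Type Parent':'Object Sheet',
# --'Form Type Parent PK':'3','Material':'Ceramic','Related Objects':'Obj B, Obj C','Related Objects__PKs':'12, 14'}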
#now load all the FRAVs
if all_FRATs:
for FRAT in all_FRATs:
currentFRAV = FRAT.formrecordattributevalue_set.all().filter(form_parent__pk=form.pk)
if currentFRAV.exists():
currentFRAV = currentFRAV[0]
new_row[FRAT.record_type] = currentFRAV.record_value
else:
new_row[FRAT.record_type] = ""
if all_FRRTs:
#now load all the FRRVs
for FRRT in all_FRRTs:
currentFRRV = FRRT.formrecordreferencevalue_set.all().filter(form_parent__pk=form.pk)
if currentFRRV.exists():
if currentFRRV[0].record_reference.all().exists():
allLabels = ""
allPKs = ""
for ref in currentFRRV[0].record_reference.all():
allPKs += '"' +str(ref.pk) + ","
allLabels += ref.form_name + ","
new_row[FRRT.record_type] = allLabels
new_row[FRRT.record_type+'__PKs'] = allPKs
else:
#Add a blank PK list and use the external key for the label list
new_row[FRRT.record_type] = currentFRRV[0].external_key_reference
new_row[FRRT.record_type+'__PKs'] = ""
else:
new_row[FRRT.record_type] = ""
new_row[FRRT.record_type+'__PKs'] = ""
formtype_dict.append(new_row)
all_dicts.append(formtype_dict)
print >>sys.stderr, str(len(all_dicts)) + " " + str(all_dicts[0])
#End our AJAX messaging
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"percent_done":"100","current_formtype":"None","is_complete":"True", "formtype_total":"1", "current_form":"None", "total_forms":"None"}'
progressData.is_complete = True
progressData.save()
#Return the data as the specified format
if request.POST['export_format'] == "CSV":
#for each dict, let's make a list of CSV files
csv_file_list = []
for csv_dict in all_dicts:
#Guard against an IndexError when a form type produced no rows
keys = csv_dict[0].keys() if csv_dict else []
csv_file = io.BytesIO()
csv_writer = csv.DictWriter(csv_file, fieldnames=keys)
csv_writer.writerow(dict((fn,fn) for fn in keys))
csv_writer.writerows(csv_dict)
csv_file_list.append(csv_file)
if len(csv_dict) > 0:
print >>sys.stderr, str(len(csv_file_list)) + " " + csv_dict[0]['Form Type Parent']
else:
print >>sys.stderr, str(len(csv_file_list)) + " " + str(csv_dict)
zipped_file = io.BytesIO()
with contextlib.closing(zipfile.ZipFile(zipped_file, 'w')) as f: #NOTE contextlib.closing() is needed on py2.6 to 'close' the file; on later Python versions ZipFile is a context manager and closing() can be dropped
for i, file in enumerate(csv_file_list):
print >>sys.stderr, str(file)
f.writestr("{0}__.csv".format(i), file.getvalue())
zipped_file.seek(0)
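#The archive now holds one flattened CSV per form type, named 0__.csv, 1__.csv, ...
# --in the same order as all_dicts.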
response = HttpResponse(zipped_file, content_type='application/x-zip-compressed')
response['Content-Disposition'] = 'attachment; filename="'+project.name+'__'+request.user.username+'.zip"'
progressData.jsonString = '{"row_index":"100","is_complete":"True", "row_total":"100"}'
progressData.is_complete = True
progressData.save()
return response
elif request.POST['export_format'] == "JSON":
#for each dict, let's make a list of JSON files
json_file_list = []
for json_dict in all_dicts:
json_file = io.BytesIO(json.dumps(json_dict))
json_file_list.append(json_file)
zipped_file = io.BytesIO()
with contextlib.closing(zipfile.ZipFile(zipped_file, 'w')) as f: #NOTE contextlib.closing() is needed on py2.6 to 'close' the file; on later Python versions ZipFile is a context manager and closing() can be dropped
for i, file in enumerate(json_file_list):
print >>sys.stderr, str(file)
f.writestr("{0}__.json".format(i), file.getvalue())
zipped_file.seek(0)
response = HttpResponse(zipped_file, content_type='application/x-zip-compressed')
response['Content-Disposition'] = 'attachment; filename="'+project.name+'__'+request.user.username+'.zip"'
progressData.jsonString = '{"row_index":"100","is_complete":"True", "row_total":"100"}'
progressData.is_complete = True
progressData.save()
return response
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 EXPORT_FORMTYPE()
#=======================================================#
def export_formtype(self, request, **kwargs):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FormType pk value and returns a flattened .csv file export of all associated forms childed to this
# --FormType in the project, or a JSON dump of the created Python Dictionary
#
# The CSV file has the following columns:
#
# ------------------------------------------------------------------------------------------------------------------------------------------------
# | Form_PK | Form_Name | FormType_PK | FormType_Name | Att_Name_1 | Att_Name_2 | Ref_Att_Name_1_PKs        | Ref_Att_Name_1_Labels | ...
# ------------------------------------------------------------------------------------------------------------------------------------------------
# | 000     | Object A  | 000         | Object Sheet  | Att Value  | Att Value  | Val1_pk, Val2_pk, Val3_pk | Val1, Val2, Val3      | ...
#
# This is essentially a list of forms whose column headings are a direct 1:1 copy of the FRATs. The FRRTs are a little more complex. Each FRRT is given
# --2 columns to provide both a comma-separated list of referenced form names and a comma-separated list of their database PK values. The names
# --might be enough on their own, but I like to have both for completeness' sake.
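# Example request (illustrative values only; the field names are taken from the reads below):
#   POST data: {'id': <FormType pk>, 'uuid': <client-generated uuid>, 'export_format': 'CSV' or 'JSON'}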
ERROR_MESSAGE = ""
print >> sys.stderr, request
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
formtype = FormType.objects.get(pk=request.POST['id'])
if formtype.project.pk == request.user.permissions.project.pk and formtype.flagged_for_deletion == False:
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"row_index":"0","row_total":"0","is_complete":"False","row_timer":"0"}')
progressData.save()
keepAliveTimer = time.clock()
csv_string = ""
all_forms = formtype.form_set.all().filter(flagged_for_deletion=False)
# Load all of our FRATs and FRRTs
all_FRATs = formtype.formrecordattributetype_set.all()
all_FRRTs = formtype.ref_to_parent_formtype.all()
csv_dict = []
formCounter = 0
total_forms = len(all_forms)
#Guard against a ZeroDivisionError when the form type has no forms
form_pct_interval = 100.0 / total_forms if total_forms else 0
#Start loading all the forms by row now
if all_forms:
for form in all_forms:
#-------------------------------------------------------------------------------------------------
# This block handles the AJAX progress calls before every form is processed
#vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
form_pct_done = formCounter * form_pct_interval
form_pct_done = int(form_pct_done * 100) / 100.0
#We re-initialize the progressData instance so it refreshes the values changed by the checkProgress() function
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"percent_done":"'+str(form_pct_done)+'","current_formtype":"'+str(formtype.form_type_name)+'","is_complete":"False", "formtype_total":"1", "current_form":"'+str(form.form_name)+'", "total_forms":"'+str(total_forms)+'"}'
progressData.is_complete = False
#We want to make sure that our timer is checked at 5-second intervals. The AJAX script sets the keep_alive variable to True
# --every 1 second. I've set it to 5 seconds here to account for any delays that might occur over the network.
# --Every 5 seconds, this script resets the keep_alive variable to False; if it is already False, that means the user exited
# --the process on their AJAX end, so we should stop adding to the database and delete what we've already done.
print >>sys.stderr, str(time.clock()) + " - " + str(keepAliveTimer) + " : " + str(progressData.keep_alive)
if time.clock() - keepAliveTimer > 5:
print >> sys.stderr, str(time.clock() - keepAliveTimer) + " : We are at the 5 second interval! " + str(formCounter)
#restart the keepAlive timer to the current time
keepAliveTimer = time.clock()
#delete the data if the user's AJAX end is unresponsive
if not progressData.keep_alive:
print >> sys.stderr, "We are deleting our progress now--wish us luck!"
#TODO Delete all formtypes FRAT/FRRTs that we just tried making
progressData.delete()
#break from loop
print >>sys.stderr, "Breaking from export function--user/client was unresponsive"
ERROR_MESSAGE = "Breaking from export function--user/client was unresponsive"
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
else:
progressData.keep_alive = False
progressData.save()
formCounter += 1
# End of AJAX Progress processing for this iteration
#-------------------------------------------------------------------------------------------------
new_row = {'Form Name':form.form_name,'Form PK':str(form.pk),'Form Type Parent':formtype.form_type_name,'Form Type Parent PK':str(formtype.pk),}
#now load all the FRAVs
for FRAT in all_FRATs:
currentFRAV = FRAT.formrecordattributevalue_set.all().filter(form_parent__pk=form.pk)
if currentFRAV.exists():
currentFRAV = currentFRAV[0]
new_row[FRAT.record_type] = currentFRAV.record_value
else:
new_row[FRAT.record_type] = ""
#now load all the FRRVs
for FRRT in all_FRRTs:
currentFRRV = FRRT.formrecordreferencevalue_set.all().filter(form_parent__pk=form.pk)
if currentFRRV.exists():
if currentFRRV[0].record_reference.all().exists():
allLabels = ""
allPKs = ""
for ref in currentFRRV[0].record_reference.all():
allPKs += '"' +str(ref.pk) + ","
allLabels += ref.form_name + ","
new_row[FRRT.record_type] = allLabels
new_row[FRRT.record_type+'__PKs'] = allPKs
else:
#Add a blank PK list and use the external key for the label list
new_row[FRRT.record_type] = currentFRRV[0].external_key_reference
new_row[FRRT.record_type+'__PKs'] = ""
else:
new_row[FRRT.record_type] = ""
new_row[FRRT.record_type+'__PKs'] = ""
#add the completed row to the dictionary list
csv_dict.append(new_row)
#End our AJAX messaging
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"percent_done":"100","current_formtype":"None","is_complete":"True", "formtype_total":"1", "current_form":"None", "total_forms":"None"}'
progressData.is_complete = True
progressData.save()
if request.POST['export_format'] == "CSV":
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="'+formtype.form_type_name+'__'+request.user.username+'.csv"'
#Guard against an IndexError when the form type produced no rows
keys = csv_dict[0].keys() if csv_dict else []
csv_file = csv.DictWriter(response, fieldnames=keys)
csv_file.writerow(dict((fn,fn) for fn in keys))
csv_file.writerows(csv_dict)
return response
elif request.POST['export_format'] == "JSON":
jsonResponse = json.dumps(csv_dict)
response = HttpResponse(jsonResponse, content_type='application/json')
response['Content-Disposition'] = 'attachment; filename="'+formtype.form_type_name+'__'+request.user.username+'.json"'
return response
else:
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#-------------------------------------------------------------------------------------------------------
# RECYCLING BIN ENDPOINTS
#=======================================================#
# ACCESS LEVEL : 4 LOAD_RECYCLING_BIN()
#=======================================================#
def load_recycling_bin(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# Loads the recycling bin
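# Example response (illustrative values only; keys taken from the builders below):
#   {"recycled_items": [{"item_pk": 7, "item_label": "Object Sheet", "item_type": "FTYPE",
#    "date_deleted": "2014-01-01 12:00:00"}, ...]}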
ERROR_MESSAGE = ""
print >> sys.stderr, request
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
project = FormProject.objects.get(pk=request.POST['project_pk'])
if project.pk == request.user.permissions.project.pk:
recycleBinJSON = {}
recycledItems = []
recycleBinJSON['recycled_items'] = recycledItems
#First find all FormTypes that are flagged and add them to the list
recycledFormTypes = FormType.objects.all().filter(project__pk=project.pk, flagged_for_deletion=True)
for formType in recycledFormTypes:
currentItem = {}
currentItem['item_pk'] = formType.pk
currentItem['item_label'] = formType.form_type_name
currentItem['item_type'] = "FTYPE"
currentItem['date_deleted'] = str(formType.date_last_modified)
recycledItems.append(currentItem)
#Find all recycled Forms whose parent FormType is NOT also recycled (we don't want to list Forms of recycled FormTypes)
recycledForms = Form.objects.all().filter(project__pk=project.pk, flagged_for_deletion=True, form_type__flagged_for_deletion=False)
for form in recycledForms:
currentItem = {}
currentItem['item_pk'] = form.pk
currentItem['item_label'] = form.form_name
currentItem['item_type'] = "FORM"
currentItem['parent_form_type'] = form.form_type.form_type_name
currentItem['date_deleted'] = str(form.date_last_modified)
recycledItems.append(currentItem)
#Find all recycled FRATs whose parent FormType is not also recycled
recycledFRATs = FormRecordAttributeType.objects.all().filter(project__pk=project.pk, flagged_for_deletion=True, form_type__flagged_for_deletion=False)
for FRAT in recycledFRATs:
currentItem = {}
currentItem['item_pk'] = FRAT.pk
currentItem['item_label'] = FRAT.record_type
currentItem['item_type'] = "FRAT"
currentItem['parent_form_type'] = FRAT.form_type.form_type_name
currentItem['date_deleted'] = str(FRAT.date_last_modified)
recycledItems.append(currentItem)
#Find all recycled FRRTs whose parent FormType is not also recycled
recycledFRRTs = FormRecordReferenceType.objects.all().filter(project__pk=project.pk, flagged_for_deletion=True, form_type_parent__flagged_for_deletion=False)
for FRRT in recycledFRRTs:
currentItem = {}
currentItem['item_pk'] = FRRT.pk
currentItem['item_label'] = FRRT.record_type
currentItem['item_type'] = "FRRT"
currentItem['parent_form_type'] = FRRT.form_type_parent.form_type_name
currentItem['date_deleted'] = str(FRRT.date_last_modified)
recycledItems.append(currentItem)
jsonResponse = json.dumps(recycleBinJSON)
#SUCCESS!!
return HttpResponse(jsonResponse,content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RECYCLE_FORM_TYPE()
#=======================================================#
def recycle_form_type(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FormType pk value and flags it for the recycling bin so it is no longer shown--hidden from
# --all queries and access until restored by a level 5 access privilege user. It allows for the temporary
# --'deletion' of database items while still allowing them to be restored, effectively like an 'undo' button.
#
# This also loops through all childed Forms of the FormType and flags them for recycling. There is no need to flag the childed
# --FRATs or FRRTs, because no endpoint or query will allow them to be accessed without their parent Form or FormType requesting them.
# --They can be individually flagged, but there is no need to flag them here.
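# Example request (illustrative): POST data {'ID': <FormType pk>}; a successful call returns {"MESSAGE":"SUCCESS!"}.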
ERROR_MESSAGE = ""
print >> sys.stderr, request
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
formtype = FormType.objects.get(pk=request.POST['ID'])
if formtype.project.pk == request.user.permissions.project.pk:
formtype.flagged_for_deletion = True
formtype.save()
#now flag all of its children forms
childForms = formtype.form_set.all()
#Evaluating the queryset loads all child forms with a single database hit; each save below is a separate write
if childForms:
for aForm in childForms:
aForm.flagged_for_deletion = True
aForm.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 3 RECYCLE_FORM()
#=======================================================#
def recycle_form(self, request):
#***************#
ACCESS_LEVEL = 3
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a Form pk value and flags it for the recycling bin so it is no longer shown--hidden from
# --all queries and access until restored by a level 5 access privilege user. It allows for the temporary
# --'deletion' of database items while still allowing them to be restored, effectively like an 'undo' button.
#
#
# There is no need to flag its childed FRAVs or FRRVs because they can't be accessed without this form
# --requesting access to them
#
# !!TODO!! It may still be possible to query them in the query tools--I'll have to check whether their parent Form
# --limits the django filters. If not, then we will need to loop through all the rvals and turn their recycle
# --flags on. !!!!!Upon further investigation--it would appear they ARE limited by their Form parent, so let's ignore this,
# !!!!! --but a little more investigation could be necessary, so let's leave this open-ended for now.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
form = Form.objects.get(pk=request.POST['ID'])
if form.project.pk == request.user.permissions.project.pk:
form.flagged_for_deletion = True
form.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 3 RECYCLE_FRAT()
#=======================================================#
def recycle_frat(self, request):
#***************#
ACCESS_LEVEL = 3
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FormRecordAttributeType pk value and flags it for the recycling bin so it is no longer shown--hidden from
# --all queries and access until restored by a level 5 access privilege user. It allows for the temporary
# --'deletion' of database items while still allowing them to be restored, effectively like an 'undo' button.
#
# No need to worry about the childed FRAVs because they shouldn't be loaded with the Form without a FRAT anyway.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
frat = FormRecordAttributeType.objects.get(pk=request.POST['ID'])
if frat.project.pk == request.user.permissions.project.pk:
frat.flagged_for_deletion = True
frat.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 3 RECYCLE_FRRT()
#=======================================================#
def recycle_frrt(self, request):
#***************#
ACCESS_LEVEL = 3
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FormRecordReferenceType pk value and flags it for the recycling bin so it is no longer shown--hidden from
# --all queries and access until restored by a level 5 access privilege user. It allows for the temporary
# --'deletion' of database items while still allowing them to be restored, effectively like an 'undo' button.
#
# No need to worry about the childed FRRVs because they shouldn't be loaded with the Form without an FRRT anyway.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
frrt = FormRecordReferenceType.objects.get(pk=request.POST['ID'])
if frrt.project.pk == request.user.permissions.project.pk:
frrt.flagged_for_deletion = True
frrt.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RESTORE_FORM_TYPE()
#=======================================================#
def restore_form_type(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FormType pk value and flags it for restoring from the recycling bin. It essentially 'undoes'
# --the deletion and restores the object to its previous state.
#
# This also loops through all childed Forms of the FormType and flags them for restoration. There is no need to flag the childed
# --FRATs or FRRTs, because they are left untouched by the recycling endpoint
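# Example (illustrative): POST data {'ID': <FormType pk>} clears flagged_for_deletion on the
# --FormType and every one of its child Forms, returning {"MESSAGE":"SUCCESS!"} on success.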
ERROR_MESSAGE = ""
print >> sys.stderr, request
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
formtype = FormType.objects.get(pk=request.POST['ID'])
if formtype.project.pk == request.user.permissions.project.pk:
formtype.flagged_for_deletion = False
formtype.save()
#now unflag all of its children forms
childForms = formtype.form_set.all()
#Evaluating the queryset loads all child forms with a single database hit; each save below is a separate write
if childForms:
for aForm in childForms:
aForm.flagged_for_deletion = False
aForm.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RESTORE_FORM()
#=======================================================#
def restore_form(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a Form pk value and flags it for restoring from the recycling bin. It essentially 'undoes'
# --the deletion and restores the object to its previous state.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
form = Form.objects.get(pk=request.POST['ID'])
if form.project.pk == request.user.permissions.project.pk:
form.flagged_for_deletion = False
form.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RESTORE_FRAT()
#=======================================================#
def restore_frat(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FRAT pk value and flags it for restoring from the recycling bin. It essentially 'undoes'
# --the deletion and restores the object to its previous state.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
frat = FormRecordAttributeType.objects.get(pk=request.POST['ID'])
if frat.project.pk == request.user.permissions.project.pk:
frat.flagged_for_deletion = False
frat.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RESTORE_FRRT()
#=======================================================#
def restore_frrt(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes a FRRT pk value and flags it for restoring from the recycling bin. It essentially 'undoes'
# --the deletion and restores the object to its previous state.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
frrt = FormRecordReferenceType.objects.get(pk=request.POST['ID'])
if frrt.project.pk == request.user.permissions.project.pk:
frrt.flagged_for_deletion = False
frrt.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#-------------------------------------------------------------------------------------------------------
# MODEL DELETION ENDPOINTS
#=======================================================#
# ACCESS LEVEL : 5 DELETE_FORM_TYPE()
#=======================================================#
def delete_form_type(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in a FormType pk value and permanently deletes that FormType from the database.
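# Example request (illustrative): POST data {'ID': <FormType pk>}; unlike the recycle endpoints above,
# --this deletion is permanent and there is no restore.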
ERROR_MESSAGE = ""
print >> sys.stderr, request
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
formtype = FormType.objects.get(pk=request.POST['ID'])
if formtype.project.pk == request.user.permissions.project.pk:
formtype.delete()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 DELETE_FORM()
#=======================================================#
def delete_form(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in a Form pk value and permanently deletes that Form from the database.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
form = Form.objects.get(pk=request.POST['ID'])
if form.project.pk == request.user.permissions.project.pk:
form.delete()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 DELETE_FRAT()
#=======================================================#
def delete_frat(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in a FormRecordAttributeType pk value and permanently deletes that FRAT from the database.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
frat = FormRecordAttributeType.objects.get(pk=request.POST['ID'])
if frat.project.pk == request.user.permissions.project.pk:
frat.delete()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 DELETE_FRRT()
#=======================================================#
def delete_frrt(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in a FormRecordReferenceType pk value and permanently deletes that FRRT from the database.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
frrt = FormRecordReferenceType.objects.get(pk=request.POST['ID'])
if frrt.project.pk == request.user.permissions.project.pk:
frrt.delete()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 DELETE_FORM_TYPE_GROUP()
#=======================================================#
def delete_form_type_group(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint deletes a form type group -- we don't need to worry about recycling these because they don't take much
# --effort to recreate; they are just arbitrary labels
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
ftg = FormTypeGroup.objects.get(pk=request.POST['ID'])
if ftg.project.pk == request.user.permissions.project.pk:
ftg.delete()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are attempting to access another project's data!"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#-------------------------------------------------------------------------------------------------------
# MODEL MODIFICATION ENDPOINTS
#=======================================================#
# ACCESS LEVEL : 4 CREATE_NEW_FORM_TYPE_TEMPLATE()
#=======================================================#
def create_new_form_type_template(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in POST data submitted by a user to add a new template to the formtype's existing json object string.
# --If the template name matches an existing template, it simply overwrites it.
# --First it converts the json string to a python dict and adds a key with the new template name.
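# Illustrative merge (template names assumed): an existing template_json of {"default": {...}} posted
# --with template_json {"summer_dig": {...}} yields {"default": {...}, "summer_dig": {...}};
# --posting "default" again would simply replace that key.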
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
print >>sys.stderr, request.POST
if 'formtype_id' in request.POST:
currentFormType = FormType.objects.get(pk=request.POST['formtype_id'], project=request.user.permissions.project)
#Convert the new template in the POST to a python Dict
newTemplateString = request.POST['template_json']
newTemplateDict = json.loads(newTemplateString)
print >>sys.stderr, currentFormType.template_json
#Convert the formtype's json to a python dict
currentTemplateString = currentFormType.template_json
if currentTemplateString != "" and currentTemplateString != None:
currentTemplateDict = json.loads(currentTemplateString)
for key in newTemplateDict:
currentTemplateDict[key] = newTemplateDict[key]
currentFormType.template_json = json.dumps(currentTemplateDict)
else:
currentFormType.template_json = newTemplateString
currentFormType.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 CREATE_NEW_FORM_TYPE()
#=======================================================#
def create_new_form_type(self, request):
#***************#
ACCESS_LEVEL = 4
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in POST data submitted by the 'create new form type' page. It's similar to the 'edit_form_type' endpoint
# --but it only creates new objects in the database rather than edits them.
#
# It requires a level 4 access to make new form types. We also put in a project restriction on the formtype constrained by the
# --project ID in the user's permissions. If the formtype doesn't match the user's project, it will bring up an error page.
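# Illustrative POST key convention (parsed by the split("__") logic below; all values assumed):
#   'form_type_name': 'Object Sheet', 'ft_media_type': '-1', 'ft_group': 'NEW', 'ft_group_new': 'Finds',
#   'frat__1__new': 'Material', 'frat__1__order': '1', 'frrt__2__new': 'Related Objects', 'nfrrt__2__ref': 'self-reference'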
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
print >>sys.stderr, request.POST
post_data = request.POST
newFormType = FormType()
#Update the form's basic attributes
newFormType.form_type_name = post_data.get('form_type_name')
newFormType.project = request.user.permissions.project
#Add the user information - We only set created by in endpoints that create the model for the first time
newFormType.created_by = request.user
newFormType.modified_by = request.user
#add the appropriate flag for the formtype's hard-coded type: e.g. is it a media or control group?
if post_data.get('ft_media_type') != '-1':
newFormType.type = 1
#also add the media type, e.g. img/pdf/3d etc.
newFormType.media_type = post_data.get('ft_media_type')
newFormType.file_extension = str(post_data.get('file_extension'))
#If there is a URI prefix then add one--otherwise set it to None
if 'uri_prefix' in post_data:
if post_data['uri_prefix'] != "" or post_data['uri_prefix'] != " ":
newFormType.uri_prefix = post_data['uri_prefix']
else:
newFormType.uri_prefix = None
#Make sure that the hierarchy and group settings are kept null
newFormType.form_type_group = None
newFormType.is_hierarchical = False
#We need to delete all of the child Forms parent references
remove_all_form_hierarchy_parent_references(newFormType)
else:
newFormType.type = 0
#Update the form type's group
#If it's a new group
if post_data.get('ft_group') == 'NEW':
#Create a new formtype group
newFormTypeGroup = FormTypeGroup(name=request.POST['ft_group_new'], project=newFormType.project)
#Add the user information - We only set created by in endpoints that create the model for the first time
newFormTypeGroup.created_by = request.user
newFormTypeGroup.modified_by = request.user
newFormTypeGroup.save()
newFormType.form_type_group = newFormTypeGroup
#If it's coded to remove the group, then set the field to null
elif post_data.get('ft_group') == 'NONE':
newFormType.form_type_group = None
#Otherwise it's not a new group and not being removed so use the provided value
else:
newFormType.form_type_group = FormTypeGroup.objects.get(pk=request.POST['ft_group'])
print >>sys.stderr, "WTF!!!! " + post_data.get('ft_group')
#update the formtypes status as hierarchical
if 'is_hierarchical' in post_data:
newFormType.is_hierarchical = True
else:
newFormType.is_hierarchical = True
newFormType.save()
#Update all of the FormType's FormRecordAttributeTypes
for key in post_data:
splitKey = key.split("__")
if len(splitKey) == 3:
code,type_pk,instruction = splitKey
#If we are creating a new attribute type
if code == "frat" and instruction == "new":
newAttributeType = FormRecordAttributeType(record_type=post_data[key])
newAttributeType.form_type = newFormType
#Add the user information - We only set created by in endpoints that create the model for the first time
newAttributeType.created_by = request.user
newAttributeType.modified_by = request.user
newAttributeType.project = newFormType.project
if post_data[code + '__' + type_pk + '__order'] != "":
newAttributeType.order_number = int(post_data[code + '__' + type_pk + '__order'])
else:
#We need to give a random order number--if we don't, when Django attempts to order queries it will get confused
#--if two of the attribute types share the same number. If they have more than 600 unique columns it won't matter
#--anyway, because ordering only shows the first 5--this just helps the initial setup if someone doesn't set the
#--order fields at all.
newAttributeType.order_number = random.randint(399,999)
newAttributeType.save()
#If we are creating a new reference type
if code == "frrt" and instruction == "new":
newReferenceType = FormRecordReferenceType(record_type=post_data[key])
newReferenceType.form_type_parent = newFormType
newReferenceType.project = newFormType.project
#Add the user information - We only set created by in endpoints that create the model for the first time
newReferenceType.created_by = request.user
newReferenceType.modified_by = request.user
#we use the auto-incremented temp id from the javascript form to match the reference value for this ref type
if post_data["nfrrt__"+type_pk+"__ref"] == "self-reference":
newReferenceType.form_type_reference = newFormType
elif post_data["nfrrt__"+type_pk+"__ref"] == "-1":
newReferenceType.form_type_reference = None
else:
newReferenceType.form_type_reference = FormType.objects.get(pk=post_data["nfrrt__"+type_pk+"__ref"])
if post_data['n' + code + '__' + type_pk + '__order'] != "":
newReferenceType.order_number = int(post_data['n' + code + '__' + type_pk + '__order'])
else:
#See explanation above ^^^^^^^^^ for this random int range
newReferenceType.order_number = random.randint(399,999)
newReferenceType.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 3 SAVE_FORM_TYPE_CHANGES()
#=======================================================#
def save_form_type_changes(self, request):
#***************#
ACCESS_LEVEL = 3
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in POST data submitted by a form type editing page and makes the necessary changes. It also handles
# --any tools in the form type editor, e.g. changing an attribute RTYPE to a reference RTYPE. Another endpoint handles creating NEW
# --formtypes. This is only used for editing.
#
# It requires a level 3 access to make form type changes. We also put in a project restriction on the formtype constrained by the
# --project ID in the user's permissions. If the formtype query set is 0 in length, then this endpoint will return an error
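# Illustrative POST key convention (parsed by the split("__") logic below; all values assumed):
#   'frat__42': 'Material' edits FRAT pk 42, 'frat__42__order': '2' sets its order,
#   'frat__7__new': 'Condition' creates a new FRAT, and 'frat__42__DEL' flags FRAT pk 42 for the recycling bin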
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
deletedObjects = {}
formTypeToEdit = FormType.objects.get(pk=request.POST['formtype_pk'])
if formTypeToEdit.project.pk == request.user.permissions.project.pk:
post_data = request.POST
#Update the form's basic attributes
formTypeToEdit.form_type_name = post_data.get('form_type_name')
#Add the user information
formTypeToEdit.modified_by = request.user
#add the appropriate flag for the formtype's hard-coded type: e.g. is it a media or control group?
print >>sys.stderr, post_data.get('formtype-type')
if post_data.get('ft_media_type') != '-1':#media
formTypeToEdit.type = 1 #media
#also add the media type, e.g. img/pdf/3d etc.
formTypeToEdit.media_type = post_data.get('ft_media_type')
formTypeToEdit.file_extension = post_data.get('file_extension')
#If there is a URI prefix then add one--otherwise set it to None
if 'uri_prefix' in post_data:
if post_data['uri_prefix'] != "" or post_data['uri_prefix'] != " ":
formTypeToEdit.uri_prefix = post_data['uri_prefix']
else:
formTypeToEdit.uri_prefix = None
#Make sure that the hierarchy and group settings are kept null
formTypeToEdit.form_type_group = None
formTypeToEdit.is_hierarchical = False
#We need to delete all of the child Forms parent references
remove_all_form_hierarchy_parent_references(formTypeToEdit)
else:
formTypeToEdit.type = 0 #standard formtype
#Update the form type's group
#If it's a new group
if post_data.get('ft_group') == 'NEW':
#Create a new formtype group
newFormTypeGroup = FormTypeGroup(name=post_data.get('ft_group_new'), project=request.user.permissions.project)
#Add the user information
newFormTypeGroup.modified_by = request.user
newFormTypeGroup.created_by = request.user
newFormTypeGroup.save()
formTypeToEdit.form_type_group = newFormTypeGroup
#If it's coded to remove the group, then set the field to null
elif post_data.get('ft_group') == 'NONE':
formTypeToEdit.form_type_group = None
#Otherwise it's not a new group and not being removed so use the provided value
else:
formTypeToEdit.form_type_group = FormTypeGroup.objects.get(pk=post_data.get('ft_group'))
print >>sys.stderr, "WTF!!!! " + post_data.get('ft_group')
#update the formtypes status as hierarchical
if 'is_hierarchical' in post_data:
formTypeToEdit.is_hierarchical = True
else:
formTypeToEdit.is_hierarchical = False
#Save the formtype
formTypeToEdit.save()
#Update all of the form's FormRecordAttributeTypes
for key in post_data:
splitKey = key.split("__")
if len(splitKey) > 1:
#--------------------------------------------------------------------------------------------------------
#Update all of the form's FormRecordAttributeTypes
#--------------------------------------------------------------------------------------------------------
# $$SS-VALIDATION$$ This "If" checks to make sure no keys that have been removed for different reasons are used going forward $$
logging.info("CURRENT KEY: " + key + "Is in deleted objects?")
print >> sys.stderr, "Fucking keys = ?? ",
for akey in deletedObjects:
print >> sys.stderr, akey+", ",
print >>sys.stderr, " "
if key not in deletedObjects:
if len(splitKey) == 2:
code,type_pk = splitKey
if code == "frat":
currentAttributeType = FormRecordAttributeType.objects.get(pk=type_pk)
currentAttributeType.record_type = post_data[key]
if post_data[key + '__order'] != "":
currentAttributeType.order_number = int(post_data[key + '__order'])
else:
                                        #We need to give a random order number--if we don't, when Django attempts to order queries, it can get
                                        #--confused if two of the attribute types share the same number. Even with hundreds of unique columns
                                        #--it won't matter anyway, because ordering only shows the first 5--this just helps the initial setup
                                        #--if someone doesn't set the order fields at all.
currentAttributeType.order_number = random.randint(399,999)
#Add the user information
currentAttributeType.modified_by = request.user
currentAttributeType.save()
if len(splitKey) == 3:
code,type_pk,instruction = splitKey
#If we are creating a new attribute type
if code == "frat" and instruction == "new":
newAttributeType = FormRecordAttributeType(record_type=post_data[key])
newAttributeType.form_type = formTypeToEdit
if post_data[code + '__' + type_pk + '__order'] != "":
newAttributeType.order_number = int(post_data[code + '__' + type_pk + '__order'])
else:
                                        #We need to give a random order number--if we don't, when Django attempts to order queries, it can get
                                        #--confused if two of the attribute types share the same number. Even with hundreds of unique columns
                                        #--it won't matter anyway, because ordering only shows the first 5--this just helps the initial setup
                                        #--if someone doesn't set the order fields at all.
newAttributeType.order_number = random.randint(399,999)
#Add the user information
newAttributeType.modified_by = request.user
newAttributeType.created_by = request.user
newAttributeType.save()
                                    #TODO: Technically all forms related to this formtype won't have an attached value until edited on the admin page
#Should I go ahead and add a null attribute value?
#If we are getting an instruction from the user to delete this attribute type then delete it
elif code== "frat" and instruction == "DEL":
#*** RECYCLING BIN *** pass this FRAT to the recycling bin
recycledFRAT = FormRecordAttributeType.objects.get(pk=type_pk)
recycledFRAT.flagged_for_deletion = True
                                    recycledFRAT.save()
#--------------------------------------------------------------------------------------------------------------
#If we're converting an attribute type into the form number, we'll do that here with the proper instruction
#--------------------------------------------------------------------------------------------------------------
elif code== "frat" and instruction == "switch-id":
#We are going to have to loop through each form of this form type, and switch the values of the form ids and chosen FRAT to replace it with
#--I think it's best to do this rather than make a new FRAT and new FRRVs which require more database actions. We are just swapping values on the existing database items
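                                    #Illustrative swap for one form (values hypothetical):
                                    #  before: form_name="1047", FRAV.record_value="Ceramic Bowl"
                                    #  after:  form_name="Ceramic Bowl", FRAV.record_value="1047"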
#Get the current attribute type we are editing
switchFRAT = FormRecordAttributeType.objects.get(pk=type_pk)
#loop through the forms of this form type
for aForm in formTypeToEdit.form_set.all():
#Store the form's id in a temp variable
oldID = aForm.form_name
#now update the ID with the value of this form's related FRAT
thisFRAV = aForm.formrecordattributevalue_set.all().filter(record_attribute_type=switchFRAT)[0]
logging.info(str(thisFRAV) + " trying to change this ???? to : " + aForm.form_name)
aForm.form_name = thisFRAV.record_value
aForm.form_number = None
#update the FRAV with the form ID
thisFRAV.record_value = oldID
#Add the user information
thisFRAV.modified_by = request.user
aForm.modified_by = request.user
#save the changes
thisFRAV.save()
aForm.save()
#Finally change the FRAT label to "Old "FormType" ID
switchFRAT.record_type = "Old " + formTypeToEdit.form_type_name + " ID"
#Add the user information
switchFRAT.modified_by = request.user
switchFRAT.save()
#--------------------------------------------------------------------------------------------------------
#Update all of the form's FormRecordReferenceTypes
#--------------------------------------------------------------------------------------------------------
# $$SS-VALIDATION$$ This "If" checks to make sure no keys that have been removed for different reasons are used going forward $$
if key not in deletedObjects:
if (len(splitKey) == 2):
code,type_pk = splitKey
#If we're changing the label of the reference type or it's order then save those changes here
if code == "frrt":
currentReferenceType = FormRecordReferenceType.objects.get(pk=type_pk)
currentReferenceType.record_type = post_data[key]
if post_data[key + '__order'] != "":
currentReferenceType.order_number = int(post_data[key + '__order'])
else:
#See explanation above ^^^^^^^^^ for this random int range
currentReferenceType.order_number = random.randint(399,999)
#Add the user information
currentReferenceType.modified_by = request.user
currentReferenceType.save()
if (len(splitKey) == 3):
code,type_pk,instruction = splitKey
# #if adding a new record reference type
if code == "frrt" and instruction == "new":
logging.info("FOR F*** SAKE : " + post_data[key] + " === " + post_data["nfrrt__"+type_pk+"__ref"])
newReferenceType = FormRecordReferenceType(record_type=post_data[key])
newReferenceType.form_type_parent = formTypeToEdit
                                    #we use the auto-incremented temp id from the javascript form to match the reference value for this ref type
if post_data["nfrrt__"+type_pk+"__ref"] == "-1":
newReferenceType.form_type_reference = None
else:
newReferenceType.form_type_reference = FormType.objects.get(pk=post_data["nfrrt__"+type_pk+"__ref"])
if post_data['n' + code + '__' + type_pk + '__order'] != "":
newReferenceType.order_number = int(post_data['n' + code + '__' + type_pk + '__order'])
else:
#See explanation above ^^^^^^^^^ for this random int range
newReferenceType.order_number = random.randint(399,999)
#Add the user information
newReferenceType.modified_by = request.user
newReferenceType.created_by = request.user
newReferenceType.save()
# #If we are getting an instruction from the user to delete this reference type then delete it
if code== "frrt" and instruction == "DEL":
#Django will "DELETE CASCADE" autmoatically this object and take care of deleting
#all the FormRecordReferenceValues that are attached to it in a ForeignKey
recycledFRRT = FormRecordReferenceType.objects.get(pk=type_pk)
recycledFRRT.flagged_for_deletion = True
recycledFRRT.save()
#----------------------------------------------------------------------------------------
# CHECK FOR ANY FLAGGED RECORD ATTRIBUTE TYPES TO BE CONVERTED TO REFERENCE TYPES
# OR IF THERE ARE ANY REF TYPES THAT NEED TO BE REFRESHED/CHANGED
#----------------------------------------------------------------------------------------
if (len(splitKey) == 3):
code,type_pk,instruction = splitKey
#Here we are checking Attribute Types
#-------------------------------------
#If we have a match instructing to convert this record attribute type to a record reference type--make the conversion
if code == 'frat' and instruction == 'is-new-ref':
thisFRAT = FormRecordAttributeType.objects.get(pk=type_pk)
#We need to quickly make any edits to the Attribute Type the User might have made
                                    #--in the same screen, e.g. changing its label name, or order number. We have to do this now
#--because when we delete the FRAT later--these items will only be updated if the post_data key list
#--happened to have that FRAT key first in line. We ensure any user edits are made to the FRAT now to be safe
#--and to be consistent. It's only two values: order_num and record_type
newFRRT = FormRecordReferenceType()
newFRRT.record_type = post_data[code+"__"+type_pk]#We use the label from the user form instead
newFRRT.order_number = post_data[code+"__"+type_pk+"__order"]#We use the order_num from the user form instead
newFRRT.is_public = thisFRAT.is_public
newFRRT.project = thisFRAT.project
newFRRT.form_type_parent = thisFRAT.form_type
#Make sure the user didn't set it to "None" so we don't get a server error.
#--Here we can leave it blank if "-1" because this is a new object created and None is the default
if post_data["frat__"+ type_pk +"__new-ref-id"] != "-1":
newFRRT.form_type_reference = FormType.objects.get(pk=post_data["frat__"+ type_pk +"__new-ref-id"])
#Add the user information
newFRRT.modified_by = request.user
newFRRT.created_by = request.user
newFRRT.save()
#Now convert the Record Attribute Type Values attached to this Record Attribute Type to Record Reference Values
#--tied to the newly created Record Reference Type
for thisFRAV in FormRecordAttributeValue.objects.filter(record_attribute_type=thisFRAT):
#logging.info(str(thisFRAV) + " <--FRAV : FRAT--> " + str(thisFRAT))
newFRRV = FormRecordReferenceValue()
newFRRV.external_key_reference = thisFRAV.record_value
newFRRV.form_parent = thisFRAV.form_parent
newFRRV.record_reference_type = newFRRT
newFRRV.project = thisFRAV.project
newFRRV.date_created = thisFRAV.date_created
newFRRV.created_by = thisFRAV.created_by
newFRRV.date_last_modified = thisFRAV.date_last_modified
#Add the user information
newFRRV.modified_by = request.user
#We need to save the newFRRV before trying to add manytomany values to it
newFRRV.save()
#Now try and match a reference through the new external value if the User didn't set the Form Type to "None"
#--Once again, we can leave this blank because the FRRV is a new object and None is the default value
if newFRRT.form_type_reference != None:
                                            #Because the external key value can contain multiple values separated by a comma, we need to take that into account
refValues = newFRRV.external_key_reference.split(",")
for value in refValues:
                                                #Make ABSOLUTE sure that we are looking for form names under the selected FormType and NOT the current FormType
                                                #--mixing these up is a subtle mistake that once cost hours of debugging long after the fact
referenceLookup = newFRRT.form_type_reference.form_set.filter(form_name=value)
logging.info(referenceLookup.count())
if referenceLookup.count() > 0:
newFRRV.record_reference.add(referenceLookup[0])
#And save the new object!
newFRRV.save()
#Now delete all old attributes
#--This should delete all attached values as well because they follow the on_delete.CASCADE direction in models.py
#--This we DO delete permanently, because it's being converted to another model in the system--no point in recycling it because it can be
#--re-converted back to a FRAT later
thisFRAT.delete()
                                    #We also need to add the post data key to the deletedObjects Dict() so they aren't used by this script later
#--in the event that the frat__pk key is after this post_value key in the dictionary iterations
deletedObjects['frat__'+type_pk] = None
#Here we are checking Reference Type Changes
#--------------------------------------------
if code == 'frrt' and instruction == 'is-new-ref':
thisFRRT = FormRecordReferenceType.objects.get(pk=type_pk)
#change the form type reference to newly selected
logging.info("TYPE PK? : "+type_pk + " old type ref? " + str(thisFRRT.form_type_reference))
                                    #We need a check here to determine if the Object was set to "None" or not, otherwise we'll get an error trying to look up a -1 pk value
if post_data["frrt__"+type_pk+"__new-ref-id"] != "-1":
thisFRRT.form_type_reference = FormType.objects.get(pk=post_data["frrt__"+type_pk+"__new-ref-id"])
else:
thisFRRT.form_type_reference = None
#Add the user information
thisFRRT.modified_by = request.user
#save the newly edited FormRecordReferenceType
thisFRRT.save()
logging.info("TYPE NEW? : "+str(thisFRRT) + " | " + str(thisFRRT.form_type_reference))
#now loop through all attached record reference values and attempt to attach them to the new form type form_names
for aFRRV in thisFRRT.formrecordreferencevalue_set.all():
#Add the user information
aFRRV.modified_by = request.user
#Once again, if the form reference type FRRT was set as "None" then we need to set its FRRV's as None as well
if thisFRRT.form_type_reference == None:
aFRRV.record_reference.clear()
aFRRV.save()
#Otherwise, perform the lookup on the given external key value to look up
else:
#Now let's find the matching form of this newly designated form_type if it exists
                                            #Because the external key value can contain multiple values separated by a comma, we need to take that into account
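                                            #e.g. (illustrative) external_key_reference "A-101,A-102" splits into two
                                            #--separate form_name lookups: filter(form_name="A-101"), then filter(form_name="A-102")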
refValues = aFRRV.external_key_reference.split(",")
for value in refValues:
referenceLookup = thisFRRT.form_type_reference.form_set.filter(form_name=value)
logging.info(referenceLookup.count())
if referenceLookup.count() > 0:
aFRRV.record_reference.add(referenceLookup[0])
aFRRV.save()
# $$SS-VALIDATION$$ There's no need to delete anything--we aren't converting entity types--just changing values.
#--What we do need to do however, is ensure that the hidden reference field is not used(it's only used for new fields--not old ones)
#--We have to check this, otherwise if it's iterated over--after this in the post_data, it will revert the change we just made.
                                    #--This should be handled by a 'disabled' tag in the templates, but this is a server-side security measure in case someone
                                    #--removes the 'disabled' attributes in their browser's debugger
logging.info("DeletedObjects Adding: " + 'frrt__'+type_pk+"__ref" + " with Count @ : " + str(len(deletedObjects)))
deletedObjects['frrt__'+type_pk+"__ref"] = None
#----------------------------------------------------------------------------------------
# CHECK FOR ANY FLAGGED RECORD REFERENCE TYPES TO BE CONVERTED TO ATTRIBUTE TYPES
#----------------------------------------------------------------------------------------
if (len(splitKey) == 3):
code,type_pk,instruction = splitKey
if code == "frrt" and instruction == "is-new-att":
#We need to make a new attribute type, label it with the reference label, and then loop through all the ref values
#and convert the external key ids to the new attribute values
oldFRRT = FormRecordReferenceType.objects.get(pk=type_pk)
newFRAT = FormRecordAttributeType()
newFRAT.record_type = oldFRRT.record_type
newFRAT.form_type = oldFRRT.form_type_parent
newFRAT.order_number = oldFRRT.order_number
newFRAT.project = oldFRRT.project
newFRAT.is_public = oldFRRT.is_public
#Add the user information
newFRAT.modified_by = request.user
newFRAT.created_by = oldFRRT.created_by
newFRAT.save()
#Now loop through all the FRRVs
for FRRV in oldFRRT.formrecordreferencevalue_set.all():
newFRAV = FormRecordAttributeValue()
newFRAV.record_value = FRRV.external_key_reference
newFRAV.date_created = FRRV.date_created
newFRAV.date_last_modified = FRRV.date_last_modified
newFRAV.record_attribute_type = newFRAT
newFRAV.form_parent = FRRV.form_parent
newFRAV.project = FRRV.project
#Add the user information
newFRAV.modified_by = request.user
newFRAV.created_by = FRRV.created_by
#Save our new FormRecordAttributeValue, and delete our old FormRecordReferenceValue
newFRAV.save()
#--This we DO delete permanently, because it's being converted to another model in the system--no point in recycling it because it can be
#--re-converted back to a FRRV later
FRRV.delete()
#Once this loop is finished, make sure we delete the old FormRecordReferenceType as well
#--This we DO delete permanently, because it's being converted to another model in the system--no point in recycling it because it can be
#--re-converted back to a FRRT later
oldFRRT.delete()
#Add the FRRT to our garbage pile as well
deletedObjects['frrt__'+type_pk+"__ref"] = None
deletedObjects['frrt__'+type_pk] = None
return HttpResponse('{"MESSAGE":"Success!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
else: ERROR_MESSAGE += "Error: You are trying to access the API without using a POST request."
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying form type information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 SAVE_PROJECT_CHANGES()
#=======================================================#
def save_project_changes(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This endpoint takes in POST data submitted by the Admin Project form and makes any project changes to the database
        # --Users are handled by a separate form, but basic metadata associated with the project is stored and modified through this
# --Admin API endpoint
#
        # This endpoint also requires level 5 access--ONLY project admins can change any of this information
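        #
        # Illustrative POST body (field names as read below; values hypothetical):
        #   project_name, project_description, project_geojson_string,
        #   dam_uri_img, dam_uri_thumb, dam_uri_download, dam_uri_upload, dam_upload_key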
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
#Only edit the project attached to this User
projectToEdit = request.user.permissions.project
projectToEdit.name = request.POST.get('project_name')
projectToEdit.description = request.POST.get('project_description')
projectToEdit.geojson_string = request.POST.get('project_geojson_string')
projectToEdit.uri_img = request.POST.get('dam_uri_img')
projectToEdit.uri_thumbnail = request.POST.get('dam_uri_thumb')
projectToEdit.uri_download = request.POST.get('dam_uri_download')
projectToEdit.uri_upload = request.POST.get('dam_uri_upload')
projectToEdit.uri_upload_key = request.POST.get('dam_upload_key')
#Add the user information
projectToEdit.modified_by = request.user
projectToEdit.save()
return HttpResponse('{"MESSAGE":"Success!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You must use POST to access this endpoint"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#========================================================#
# ACCESS LEVEL : 4 RUN_GEOJSON_IMPORTER()
#=======================================================#
def run_geojson_importer(self, request):
#******************************************#
ACCESS_LEVEL = 4
PROJECT = request.user.permissions.project
#******************************************#
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
        # This API Endpoint takes an argument for a geojson file and creates a new formtype for the data.
        # --GeoJSON is a standardized format for geospatial data and its associated metadata (attribute data).
        # --This will need some tinkering, but should work with a few adjustments.
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
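        # Minimal illustrative input (structure inferred from the parsing below; field contents hypothetical):
        #   {"name": "Survey Points",
        #    "crs": {...},
        #    "features": [{"geometry": {...}, "properties": {"Site No": "A-1", "Material": "ceramic"}}]}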
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
print >>sys.stderr, "Starting"
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"row_index":"0","row_total":"0","is_complete":"False","row_timer":"0"}')
progressData.save()
listOfFRATS = {}
geojson = request.POST['geojson']
geojson = json.loads(geojson)
print >>sys.stderr, "json Loaded"
#create the new formtype
newFormtype = FormType()
newFormtype.form_type_name = geojson['name']
newFormtype.geojson_projection = json.dumps(geojson['crs'])
newFormtype.project = PROJECT
newFormtype.type = 0
newFormtype.save()
totalFeatures = len(geojson['features'])
featureCounter = 0
                counterIncrement = 100.0 / totalFeatures if totalFeatures else 0.0 #percentage step per feature
startTimer = time.clock()
endTimer = time.clock()
for aFeature in geojson['features']:
#get current percentage of completion
endTimer = time.clock()
#print >>sys.stderr, "New Form : " + str(featureCounter) + " out of " +str(totalFeatures)+ " rows finished! " + str(counterIncrement)
progressData.jsonString = '{"row_index":"'+str(featureCounter)+'","is_complete":"False", "row_timer":"'+str(endTimer-startTimer)+'", "row_total":"'+str(totalFeatures)+'", "percent_done":"'+str(int(featureCounter*counterIncrement))+'"}'
progressData.is_complete = False
progressData.save()
startTimer = time.clock()
featureCounter += 1
newForm = Form()
newForm.form_geojson_string = json.dumps(aFeature['geometry'])
print >>sys.stderr, str(newForm.form_geojson_string)
newForm.form_type = newFormtype
newForm.save()
for att_label, att_value in aFeature['properties'].iteritems():
#print >> sys.stderr, str(att_label) + " : " + str(att_value)
#check if the current attribute is our specified form ID
if att_label in request.POST:
#print >>sys.stderr, "Found our Label! ^^^^^^^^" + str(att_value) + " : " + aFeature['properties'][att_label]
newForm.form_name = str(att_value)
newForm.save()
else:
#Otherwise let's makes FRATs for the formtype
if att_label in listOfFRATS:
#the FRAT already exists so set our current FRAT to the one that matches
newFRAT = FormRecordAttributeType.objects.get(pk=listOfFRATS[att_label])
else:
#it's a new attribute type(so we're in the first loop iteration)
newFRAT = FormRecordAttributeType()
newFRAT.record_type = att_label
newFRAT.form_parent = newForm
newFRAT.form_type = newFormtype
newFRAT.order_number = random.randint(1, 999)
newFRAT.save()
listOfFRATS[att_label] = newFRAT.pk
#Make the FRAV for our new form
newFRAV = FormRecordAttributeValue()
newFRAV.record_value = str(att_value)
newFRAV.record_attribute_type = newFRAT
newFRAV.form_parent = newForm
newFRAV.save()
progressData.jsonString = '{"row_index":"'+str(featureCounter)+'","is_complete":"True", "row_total":"'+str(totalFeatures)+', "percent_done":"100"}'
progressData.is_complete = True
progressData.save()
return HttpResponse('{"MESSAGE":"Finished the import!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#========================================================#
# ACCESS LEVEL : 4 RUN_NEW_RTYPE_IMPORTER()
#=======================================================#
def run_new_rtype_importer(self, request):
#******************************************#
ACCESS_LEVEL = 4
PROJECT = request.user.permissions.project
#******************************************#
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
        # This API Endpoint takes a CSV file (converted client-side to JSON) and imports new record types onto an EXISTING FormType--
        # --each CSV row is matched to an existing Form by the user-selected main ID column and the new attribute/reference values are attached
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
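        #
        # Illustrative POST keys (column names hypothetical): record__Site No__ismainID marks the CSV column used to
        # --match rows to EXISTING forms by form_name; record__(n)__name / record__(n)__isreference / record__(n)__reftype
        # --describe each remaining column, exactly as documented in the form type importer below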
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"row_index":"0","row_total":"0","is_complete":"False","row_timer":"0"}')
progressData.save()
#kwargs.update({'uuid':progressData.pk})
post_data = request.POST
#timerA = time.clock()
#print >>sys.stderr, "Starting Clock: " + str(timerA)
                #Make sure we escape the newline characters from the json string--jscript didn't do it automatically when concatenating the rows together in the client-side script
#We also have to replace all \t 's in the json strings before loading them because JSON doesn't allow literal TABS --we need to escape them with a "\\"
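                #e.g. (illustrative) a cell containing a literal TAB must be escaped before json.loads will accept it:
                #  '{"Notes":"line one<TAB>line two"}'  ->  '{"Notes":"line one\\tline two"}'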
print >> sys.stderr, post_data.get('csv_json').encode('utf-8').replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n')
csv_json = json.loads(post_data.get('csv_json').encode('utf-8').replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n'))
print >> sys.stderr, post_data
#setup Dictionaries for post import self-referential needs
#setup a dict for hierarchy value
hierarchyDict = {}
#setup a recordreferencevalue dictionary for the form type if a particular reference is self-referencing to this same form type
selfReferenceList = []
#Get our current FormType
currentFormType = FormType.objects.all().filter(project=request.user.permissions.project, pk=request.POST['formtype_pk'])[0]
#Each row in the CSV file represents a new 'Form' of the 'currentFormType'
#Let's make a 'row' counter to help with indexing through the CSV file
row_index = 0
#Let's make an incremental counter for record type orders
                order_counter = 1
#I'm also going to make a List() of AttributeTypes/ReferenceTypes. This is done so that
#after 1 iteration of the importer loop, the reference types/ attribute types are already created. We
#don't need to create them for every row--so after the first row, we reference this list for the reference
# and attribute values
typeList = {}
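                #e.g. (illustrative) after row 0, typeList might look like:
                #  {"Site": <FormRecordReferenceType>, "Material": <FormRecordAttributeType>}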
print >> sys.stderr, "Just making sure things are working still....where's the stop point?"
main_ID_Field = ""
keepAliveTimer = time.clock()
#print >>sys.stderr, "Starting row loop: " + str(timerB) + " Time elapsed = " + str(timerB-timerA)
#For each row of the CSV
for row in csv_json:
print >> sys.stderr, "222 Just making sure things are working still....where's the stop point?"
timerBeginRow = time.clock()
#print >>sys.stderr, "Starting a new row: " + str(timerBeginRow)
#If we are past index '0' then let's continue with the rest of the importer
#Let's get the main ID
if row_index == 0:
for key, value in row.iteritems():
if 'record__'+key+'__ismainID' in post_data:
main_ID_Field = key
break
currentForm = Form.objects.all().filter(project=request.user.permissions.project, form_type=currentFormType, form_name=row[main_ID_Field])
if currentForm.exists():
currentForm = currentForm[0]
else:
print >>sys.stderr, "Skipping Form--Could not find form name:" + row[main_ID_Field]
continue
for key, value in row.iteritems():
print >>sys.stderr, key
if key != main_ID_Field:
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD REFERENCE TYPE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#------------------------------------------------------------------------------------------------------------------------
#Test if it is a reference type by checking the POST_DATA if 'record__(n)__isreference' exists
#If it is a reference Type:
if 'record__'+str(key)+'__isreference' in post_data:
#We want to make sure we only create the ReferenceType's once--otherwise we populate the database with several
                                #unnecessary copies and relations that muddy everything. So if we're past the first row/iteration of the JSON, the reference types are
#already created and stored in a list to reference after
if row_index < 1:
#create a new FormRecordReferenceType and set "record_type" variable to the header column user-given name value
newFormRecordReferenceType = FormRecordReferenceType()
newFormRecordReferenceType.project = PROJECT
newFormRecordReferenceType.is_public = False
newFormRecordReferenceType.record_type = post_data.get('record__'+str(key)+'__name')
#also set "form_type_parent" to the current formType we are importing
newFormRecordReferenceType.form_type_parent = currentFormType
#now set "form_type_reference" to the selected FormTypeReference value in the current importer Column
#if the value == 'default' then set reference to this same FormType
if post_data.get('record__'+str(key)+'__reftype') == 'default':
newFormRecordReferenceType.form_type_reference = currentFormType
#otherwise set it to the given pk value of a FormType object
else:
newFormRecordReferenceType.form_type_reference = FormType.objects.get(pk=post_data.get('record__'+str(key)+'__reftype'))
#Set an arbitrary initial order for the type
newFormRecordReferenceType.order_number = order_counter
order_counter += 1
#save the Record Reference Type
newFormRecordReferenceType.save()
#add it to the list so that the reference value can reference it
typeList[key] = newFormRecordReferenceType
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD REFERENCE VALUE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------------------------------------------------------
#If this form already has a FRRV for this importer(e.g. we are running multiple rows with the same ID) then just reference
# --that existing FRRV and add the new reference, as well as the ext_key
possibleFRRV = currentForm.ref_to_parent_form.all().filter(record_reference_type=typeList[key])
if possibleFRRV.exists():
newFormRecordReferenceValue = possibleFRRV[0]
newFormRecordReferenceValue.external_key_reference += "," + value
newFormRecordReferenceValue.save()
else:
#Create a new RecordReferenceValue
newFormRecordReferenceValue = FormRecordReferenceValue()
newFormRecordReferenceValue.project = PROJECT
newFormRecordReferenceValue.is_public = False
#set the "external_key_reference" to the column value of the csv row
newFormRecordReferenceValue.external_key_reference = value
#set the "form_parent" to the current row's Form
newFormRecordReferenceValue.form_parent = currentForm
#set the "record_reference_type" to the current RecordReferenceType
logging.info("line626 " + str(typeList[key].form_type_reference) + " :: " + newFormRecordReferenceValue.external_key_reference)
newFormRecordReferenceValue.record_reference_type = typeList[key]
#save the value to give it a pk value
newFormRecordReferenceValue.save()
#logging.info("We are about to check the reference for: " + str(newFormRecordReferenceValue))
                                    #If this reference is self-referencing to the same FormType we're importing, then similar to the hierarchy references,
                                    #--we need to store a list of the reference value objects to load once the entire form type has been imported. We don't need key values because
                                    #--the external key reference is already saved for the lookup on the model.
                                    #--I'm using the objects rather than pk values because that will save us time on SQL queries later
if post_data.get('record__'+str(key)+'__reftype') == 'default':
selfReferenceList.append(newFormRecordReferenceValue)
else:
#Now we need to set the value for "record_reference" which will involve a query
                                        #And since the external key could contain multiple values, we need to split them by the comma delimiter
#logging.info(newFormRecordReferenceValue.external_key_reference + " : BEFORE SPLIT")
possibleRefValues = newFormRecordReferenceValue.external_key_reference.split(",")
#logging.info(str(possibleRefValues) + " : SPLIT")
                                        #clear our list of refs every time to ensure we don't double add--it will use the ext key to find them
                                        # --this isn't the fastest way of doing this, but I need a quick fix for now !!!TODO
newFormRecordReferenceValue.record_reference.clear()
#for all forms in the selected FormType reference
for aForm in newFormRecordReferenceValue.record_reference_type.form_type_reference.form_set.all().prefetch_related():
#if the current external ID value == to the iterated forms "form_num"
#Make sure we convert the INT form-num to a STR first or it will fail the check
                                            for refValue in list(possibleRefValues):#iterate over a copy--removing items from the list we're looping over would skip values
if refValue == str(aForm.form_number):
#remove this value from future matches to ensure we don't double add it
possibleRefValues.remove(refValue)
#set the current FormRecordReferenceValue.record_reference to the current form in the loop iteration
newFormRecordReferenceValue.record_reference.add(aForm)
#logging.info(newFormRecordReferenceValue.external_key_reference + " : AFTER SPLIT")
#if there are no matches by the last iteration of the loop,
#we can do nothing to leave the record_reference value as "None" (the user can set this later)
#This might happen if the user is importing a new form type that references itself, or references
#another form type that hasn't yet been imported. The external_key_reference's are still saved
#so the user can run another tool to match these keys later once all the Form Types and forms have been
#imported through this tool
#save the RecordReferenceValue
newFormRecordReferenceValue.save()
#timerE = time.clock()
#print >>sys.stderr, "Ending ref lookup: " + str(timerE) + " Time elapsed = " + str(timerE-timerD)
#If it is not a reference type, then we are adding an attribute type instead
else:
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD ATTRIBUTE TYPE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#------------------------------------------------------------------------------------------------------------------------
#We want to make sure we only create the AttributeType's once--otherwise we populate the database with several
                                #unnecessary copies and relations that muddy everything. So if we're past the first row, the attribute types are
#already created and stored in a list to reference after
if row_index < 1:
#create a new FormRecordAttributeType and set "record_type" variable to the header column name
newFormRecordAttributeType = FormRecordAttributeType()
newFormRecordAttributeType.record_type = post_data.get('record__'+str(key)+'__name')
newFormRecordAttributeType.project = PROJECT
newFormRecordAttributeType.is_public = False
#also set "form_type" to the current formType we are importing
newFormRecordAttributeType.form_type = currentFormType
#Set an arbitrary initial order for the type
newFormRecordAttributeType.order_number = order_counter
order_counter += 1
#save the RecordAttributeType
newFormRecordAttributeType.save()
#add the attributeType to the typeList so that the attribute value can reference it
typeList[key] = newFormRecordAttributeType
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD Attribute VALUE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------------------------------------------------------
#If this form already has a FRRV for this importer(e.g. we are running multiple rows with the same ID) then just reference
# --that existing FRRV and add the new reference, as well as the ext_key
possibleFRAV = currentForm.formrecordattributevalue_set.all().filter(record_attribute_type=typeList[key])
if possibleFRAV.exists():
newFormRecordAttributeValue = possibleFRAV[0]
newFormRecordAttributeValue.record_value = newFormRecordAttributeValue.record_value + "," + value
newFormRecordAttributeValue.save()
else:
#Create a new RecordAttributeValue
newFormRecordAttributeValue = FormRecordAttributeValue()
newFormRecordAttributeValue.project = PROJECT
newFormRecordAttributeValue.is_public = False
#set the "record_value" to the column value of the csv row
newFormRecordAttributeValue.record_value = value
#set the "form_parent" to the current row's Form
newFormRecordAttributeValue.form_parent = currentForm
#set the "record_attribute_type" to the current RecordAttributeType
newFormRecordAttributeValue.record_attribute_type = typeList[key]
#save the RecordAttributeValue
newFormRecordAttributeValue.save()
row_index += 1
#Upload our progress data object with the current row
timerFinishRow = time.clock()
#print >>sys.stderr, "Ending a row: " + str(timerF) + " Time elapsed since row start = " + str(timerF-timerC)
                    #We need to update the progressData model because it is updated by another thread as well
#--Otherwise this will just ignore the 'keep_alive' flag and quit after 2 timer checks
                    #--I'm not entirely sold on this method--there's a small chance that the other thread
                    #--is trying to update the AJAX model at the exact same time and its write will be missed here--but as of now,
                    #--I can't think of a better solution.
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"row_index":"'+str(row_index)+'","is_complete":"False","row_total":"'+post_data.get('row_total')+'","row_timer":"'+str(timerFinishRow-timerBeginRow)+'"}'
                    #We want to make sure that our timer is set at 5 second intervals. The AJAX script sets the keep alive variable to True
# --every 1 second. I've set it to 5 seconds here to account for any delays that might occur over the network.
# --Every 5 seconds, this script resets the keep_alive variable to 'False', if it is already False--that means the user exited
# --the process on their AJAX end so we should stop adding this to the database and delete what we've already done.
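                    #Illustrative keep-alive timeline (server checks every ~5s; client pings every ~1s):
                    #  t=0s   server sets keep_alive=False    t=1..4s client sets keep_alive=True
                    #  t=5s   server sees True  -> continue and reset to False
                    #  t=10s  server sees False -> client stopped polling, so abort and clean up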
#print >>sys.stderr, str(time.clock()) + " - " + str(keepAliveTimer) + " : " + str(progressData.keep_alive)
if time.clock() - keepAliveTimer > 5:
                        print >> sys.stderr, str(time.clock() - keepAliveTimer) + " : We are at the 5 second interval! " + str(row_index)
#restart the keepAlive timer to the current time
keepAliveTimer = time.clock()
#delete the data if the user's AJAX end is unresponsive
if progressData.keep_alive == False:
print >> sys.stderr, "We are deleting our progress now--wish us luck!"
#TODO Delete all formtypes FRAT/FRRTs that we just tried making
progressData.delete()
#break from loop
break
else:
progressData.keep_alive = False
progressData.save()
                #Now update the hierarchical references if they exist
#This forloop will only run if the hierarchyDict has been appended to already
for key, value in hierarchyDict.iteritems():
formToModify = Form.objects.get(pk=key)
                    try:#Try to grab the parent form with the given form_name. If no match is found, the [0] lookup raises IndexError and hierarchy_parent stays None
formToModify.hierarchy_parent = Form.objects.all().filter(form_name=value)[0]
#print >> sys.stderr, "Admin: Line 681: WHAT'S The Name?: " + formToModify.hierarchy_parent
formToModify.save()
                    except IndexError:
                        print >>sys.stderr, "No Hierarchy Match found."
#Now Update the self references if they exist
#This forloop will only run if the selfReferenceList has been populated
for refValue in selfReferenceList:
                    #Remember that some external key references may be multi-values that are comma separated, so let's try splitting them by comma
#--and looping through them appropriately
key_list = refValue.external_key_reference.split(',')
for aKey in key_list:
                        try:#Try to grab the form with the given external ID by form_name. If no match is found, the [0] lookup raises IndexError and the reference stays unset
refValue.record_reference.add(Form.objects.all().filter(form_name=aKey)[0])
refValue.save()
                        except IndexError:
                            print >>sys.stderr, "No Ref Match found."
print >> sys.stderr, "333 Just making sure things are working still....where's the stop point?"
                #When we are finished, update the progressData to show that
progressData.jsonString = '{"row_index":"'+str(row_index)+'","is_complete":"True", "row_total":"'+post_data.get('row_total')+'"}'
progressData.is_complete = True
progressData.save()
return HttpResponse('{"MESSAGE":"Finished the import!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RUN_FORM_TYPE_IMPORTER()
#=======================================================#
def run_form_type_importer(self, request):
#******************************************#
ACCESS_LEVEL = 4
PROJECT = request.user.permissions.project
#******************************************#
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
# This API Endpoint takes an argument for a CSV file, HttpContext(e.g. context kwargs passed to the HttpResponse like pk values),
# -->and finally the POST data submitted by the form_type_importer view. It will match POST column header data customized by
# -->the user to columns in the CSV file and automatically generate, both a new FormType, and a new Form for each row of the
# -->CSV file with all the necessary RecordAttribute/ReferenceType's and Values
#
# *This function is the bread and butter of importing legacy or foreign database data into the system through CSV files
# *It uses a CSV file that has been converted into JSON of key:value pairs and passed as a POST argument
        # *This is done as an AJAX request to show progress of the database import
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
#We have the column headers saved in a coded format in the passed POST header argument 'post_data'
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
# form_type_name --> On the Import Form, this is the name of the new Form Type
#
# *Where (n) is the associated key value for the original header for each row of CSV data, e.g. if n == Object No, then the value in the csv file
# *row for the key "Object No" will match
# record__(n)__name --> This represents the RecordAttribute/ReferenceType name field for the model
        # record__(n)__reftype --> This represents the RecordReferenceType's referenced FormType, if applicable
# record__(n)__ismainID --> This is a fake Bool value. It either exists, which means this particular (n) column is to be used for the form_num/form_name field
# -->or it isn't added to the POST data because it wasn't selected and therefore does not exist, and therefore this particular column
# -->is a RecordAttributeType rather than a RecordReferenceType
# record__(n)__isreference --> This is a fake Bool value. It either exists, which means the particular (n) column is to be treated as a RecordReferenceType
# -->or it isn't added to the POST data because it wasn't selected and therefore does not exist, and therefore this particular column
# -->is a RecordAttributeType rather than a RecordReferenceType
#
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
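        # Illustrative POST for a 2-column CSV (column names and values hypothetical):
        #   form_type_name = "Artifacts"
        #   record__Object No__name = "Object Number"    record__Object No__ismainID = "on"
        #   record__Site__name = "Site"    record__Site__isreference = "on"
        #   record__Site__reftype = "17"   (pk of the referenced FormType, or 'default' for a self-reference)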
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"row_index":"0","row_total":"0","is_complete":"False","row_timer":"0"}')
progressData.save()
#kwargs.update({'uuid':progressData.pk})
post_data = request.POST
#timerA = time.clock()
#print >>sys.stderr, "Starting Clock: " + str(timerA)
                #Make sure we escape the newline characters from the json string--jscript didn't do it automatically when concatenating the rows together in the client-side script
#We also have to replace all \t 's in the json strings before loading them because JSON doesn't allow literal TABS --we need to escape them with a "\\"
print >> sys.stderr, post_data.get('csv_json').encode('utf-8').replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n')
csv_json = json.loads(post_data.get('csv_json').encode('utf-8').replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n'))
print >> sys.stderr, post_data
#setup Dictionaries for post import self-referential needs
#setup a dict for hierarchy value
hierarchyDict = {}
#setup a recordreferencevalue dictionary for the form type if a particular reference is self-referencing to this same form type
selfReferenceList = []
#Create a new form type from form_type_name <Input> and attach to current Project #
newFormType = FormType()
#Add the project to the FormType relation 'project' and make sure to use the users PROJECT
newFormType.project = PROJECT
#Add the name of the FormType to 'form_type_name' model field
newFormType.form_type_name = post_data['form_type_name']
#add the appropriate flag for the formtype's hard-coded type: e.g. is it a media type?
#We're checking whether or not the drop down select on the importer form has chosen a 'media type' if it has, then
#--we can assume it's a Media Form Type, and proceed. If it isn't one of the int values for a media type, then it's a normal form type
#--it's also worth noting that Media Form Type's cannot be added to Form Type Groups--they are their own unique Form Type Group
#--The importer will skip the Form Type Group import if it is a Media Type.
                if post_data.get('ft_media_type') != '-1': #media
newFormType.type = 1
newFormType.media_type = post_data.get('ft_media_type')
else: #we'll assume if none of the media types are selected, that it's just a normal form type and proceed
                    newFormType.type = 0
#Update the form type's group
#If it's a new group
if post_data.get('ft_group') == 'NEW':
#Create a new formtype group
newFormTypeGroup = FormTypeGroup(name=post_data.get('ft_group_new'), project=PROJECT)
newFormTypeGroup.save()
newFormType.form_type_group = newFormTypeGroup
#Otherwise it's not a new group and not being removed so use the provided value
elif post_data.get('ft_group') != 'NONE':
newFormType.form_type_group = FormTypeGroup.objects.get(pk=post_data.get('ft_group'))
#update the formtypes status as hierarchical
if 'is_hierarchical' in post_data:
newFormType.is_hierarchical = True
else:
newFormType.is_hierarchical = False
#set privacy of form type
                newFormType.is_public = False
#save the FormType to give it a new pk in the database
newFormType.save()
#Each row in the CSV file represents a new 'Form' of the 'newFormType'
#Let's make a 'row' counter to help with indexing through the CSV file
row_index = 0
#Let's make an incremental counter for record type orders
                order_counter = 1
#I'm also going to make a List() of AttributeTypes/ReferenceTypes. This is done so that
#after 1 iteration of the importer loop, the reference types/ attribute types are already created. We
#don't need to create them for every row--so after the first row, we reference this list for the reference
# and attribute values
typeList = {}
print >> sys.stderr, "Just making sure things are working still....where's the stop point?"
keepAliveTimer = time.clock()
#print >>sys.stderr, "Starting row loop: " + str(timerB) + " Time elapsed = " + str(timerB-timerA)
#For each row of the CSV
for row in csv_json:
#print >> sys.stderr, "222 Just making sure things are working still....where's the stop point?"
timerBeginRow = time.clock()
#print >>sys.stderr, "Starting a new row: " + str(timerBeginRow)
#If we are past index '0' then let's continue with the rest of the importer
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ CREATE NEW FORM @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-----------------------------------------------------------------------------------------------------------
#Create a new Form and attach the newly created 'FormType' to 'form_type' in the 'Form' model
newForm = Form()
newForm.form_type = newFormType
newForm.project = PROJECT
newForm.is_public = False
#we will worry about adding the form_name / form_number later
#save the Form to give it a pk value in the database. Now we can use it for variable assignments later
newForm.save()
#For each column in the CSV Row and the column headers (essentially all the dict/JSON key values
#We setup a bool test to determine if we find a primary id that is selected or not.
#--if we don't find a primary id by the time we end the list, set the form's name to the current row counter number
foundAMainID = False
for key, value in row.iteritems():
#timerJ = time.clock()
#print >>sys.stderr, "Starting col loop: " + str(timerJ)
#First check if this column is the unique ID for this form
#we'll see if it is by checking the POST_DATA if 'record__(n)__ismainID' exists
if 'record__'+str(key)+'__ismainID' in post_data:
#If it is, then add this column value to the current Form's "form_number" or "form_name"
#Try to add it as an int first, otherwise add it as the form name
foundAMainID = True
try:
newForm.form_number = int(value)
newForm.form_name = value
                            except ValueError:
newForm.form_name = value
#save the Form
newForm.save()
#If it is not the ID field:
#If the current column is the value to reference a hierarchy field then add it to our hierarchy Dict
#--we will process this later, because if we try now, not all of the self-referencing forms will be imported yet
#--and this will more than likely miss a number of them
elif 'record__'+str(key)+'__ishierarchy' in post_data:
#We add the current Form's pk value for the key, and the reference pk as the value
hierarchyDict[str(newForm.pk)] = value
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD REFERENCE TYPE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#------------------------------------------------------------------------------------------------------------------------
#Test if it is a reference type by checking the POST_DATA if 'record__(n)__isreference' exists
#If it is a reference Type:
elif 'record__'+str(key)+'__isreference' in post_data:
#We want to make sure we only create the ReferenceType's once--otherwise we populate the database with several
                            #unnecessary copies and relations that muddy everything. So if we're past the first row/iteration of the JSON, the reference types are
#already created and stored in a list to reference after
if row_index < 1:
#create a new FormRecordReferenceType and set "record_type" variable to the header column user-given name value
newFormRecordReferenceType = FormRecordReferenceType()
newFormRecordReferenceType.project = PROJECT
newFormRecordReferenceType.is_public = False
newFormRecordReferenceType.record_type = post_data.get('record__'+str(key)+'__name')
#also set "form_type_parent" to the current formType we are importing
newFormRecordReferenceType.form_type_parent = newFormType
#now set "form_type_reference" to the selected FormTypeReference value in the current importer Column
#if the value == 'default' then set reference to this same FormType
if post_data.get('record__'+str(key)+'__reftype') == 'default':
newFormRecordReferenceType.form_type_reference = newFormType
#otherwise set it to the given pk value of a FormType object
else:
newFormRecordReferenceType.form_type_reference = FormType.objects.get(pk=post_data.get('record__'+str(key)+'__reftype'))
#Set an arbitrary initial order for the type
newFormRecordReferenceType.order_number = order_counter
order_counter += 1
#save the Record Reference Type
newFormRecordReferenceType.save()
#add it to the list so that the reference value can reference it
typeList[key] = newFormRecordReferenceType
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD REFERENCE VALUE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------------------------------------------------------
#Create a new RecordReferenceValue
newFormRecordReferenceValue = FormRecordReferenceValue()
newFormRecordReferenceValue.project = PROJECT
newFormRecordReferenceValue.is_public = False
#set the "external_key_reference" to the column value of the csv row
newFormRecordReferenceValue.external_key_reference = value
#set the "form_parent" to the current row's Form
newFormRecordReferenceValue.form_parent = newForm
#set the "record_reference_type" to the current RecordReferenceType
logging.info("line626 " + str(typeList[key].form_type_reference) + " :: " + newFormRecordReferenceValue.external_key_reference)
newFormRecordReferenceValue.record_reference_type = typeList[key]
#save the value to give it a pk value
newFormRecordReferenceValue.save()
#logging.info("We are about to check the reference for: " + str(newFormRecordReferenceValue))
                            #If this reference is self-referencing to the same FormType we're importing, then similar to the hierarchy references,
                            #--we need to store a list of the reference value objects to load once the entire form type has been imported. We don't need key values because
                            #--the external key reference is already saved for the lookup on the model.
                            #--I'm using the objects rather than pk values because that will save us time on SQL queries later
if post_data.get('record__'+str(key)+'__reftype') == 'default':
selfReferenceList.append(newFormRecordReferenceValue)
else:
#Now we need to set the value for "record_reference" which will involve a query
                                #And since the external key could contain multiple values, we need to split them by the comma delimiter
#logging.info(newFormRecordReferenceValue.external_key_reference + " : BEFORE SPLIT")
possibleRefValues = newFormRecordReferenceValue.external_key_reference.split(",")
#logging.info(str(possibleRefValues) + " : SPLIT")
#for all forms in the selected FormType reference
for aForm in newFormRecordReferenceValue.record_reference_type.form_type_reference.form_set.all().prefetch_related():
#if the current external ID value == to the iterated forms "form_num"
#Make sure we convert the INT form-num to a STR first or it will fail the check
                                    for refValue in list(possibleRefValues):#iterate over a copy--removing items from the list we're looping over would skip values
if refValue == str(aForm.form_number):
#remove this value from future matches to ensure we don't double add it
possibleRefValues.remove(refValue)
#set the current FormRecordReferenceValue.record_reference to the current form in the loop iteration
newFormRecordReferenceValue.record_reference.add(aForm)
#logging.info(newFormRecordReferenceValue.external_key_reference + " : AFTER SPLIT")
#if there are no matches by the last iteration of the loop,
#we can do nothing to leave the record_reference value as "None" (the user can set this later)
#This might happen if the user is importing a new form type that references itself, or references
#another form type that hasn't yet been imported. The external_key_reference's are still saved
#so the user can run another tool to match these keys later once all the Form Types and forms have been
#imported through this tool
#save the RecordReferenceValue
newFormRecordReferenceValue.save()
#timerE = time.clock()
#print >>sys.stderr, "Ending ref lookup: " + str(timerE) + " Time elapsed = " + str(timerE-timerD)
#If it is not a reference type, then we are adding an attribute type instead
else:
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD ATTRIBUTE TYPE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#------------------------------------------------------------------------------------------------------------------------
#We want to make sure we only create the AttributeTypes once--otherwise we populate the database with several
#unnecessary copies and relations that muddy everything. So if we're past the first row, the attribute types are
#already created and stored in a list to reference afterwards
if row_index < 1:
#create a new FormRecordAttributeType and set "record_type" variable to the header column name
newFormRecordAttributeType = FormRecordAttributeType()
newFormRecordAttributeType.record_type = post_data.get('record__'+str(key)+'__name')
newFormRecordAttributeType.project = PROJECT
newFormRecordAttributeType.is_public = False
#also set "form_type" to the current formType we are importing
newFormRecordAttributeType.form_type = newFormType
#Set an arbitrary initial order for the type
newFormRecordAttributeType.order_number = order_counter
order_counter += 1
#save the RecordAttributeType
newFormRecordAttributeType.save()
#add the attributeType to the typeList so that the attribute value can reference it
typeList[key] = newFormRecordAttributeType
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD ATTRIBUTE VALUE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------------------------------------------------------
#Create a new RecordAttributeValue
newFormRecordAttributeValue = FormRecordAttributeValue()
newFormRecordAttributeValue.project = PROJECT
newFormRecordAttributeValue.is_public = False
#set the "record_value" to the column value of the csv row
newFormRecordAttributeValue.record_value = value
#set the "form_parent" to the current row's Form
newFormRecordAttributeValue.form_parent = newForm
#set the "record_attribute_type" to the current RecordAttributeType
newFormRecordAttributeValue.record_attribute_type = typeList[key]
#save the RecordAttributeValue
newFormRecordAttributeValue.save()
#timerK = time.clock()
#print >>sys.stderr, "End of col loop: " + str(timerK) + " Time elapsed = " + str(timerK-timerJ)
#If we didn't find a primary key for this row/form, then add the row index as the incremental form name/number
if foundAMainID == False:
newForm.form_number = int(row_index+1)
newForm.form_name = str(row_index+1)
newForm.save()
foundAMainID = False
row_index += 1
#Upload our progress data object with the current row
timerFinishRow = time.clock()
#print >>sys.stderr, "Ending a row: " + str(timerF) + " Time elapsed since row start = " + str(timerF-timerC)
#We need to update the progressData model because it is updated by another thread as well
#--Otherwise this will just ignore the 'keep_alive' flag and quit after 2 timer checks
#--I'm not entirely sold on this method--There's a slight....itty bitty...teensy weensy...chance that the other thread
#--might be trying to update the AJAX model at the exact time and will be missed here--but as of now, I can't think of a
#--better solution and I'm REALLY over working on this importer today.
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"row_index":"'+str(row_index)+'","is_complete":"False","row_total":"'+post_data.get('row_total')+'","row_timer":"'+str(timerFinishRow-timerBeginRow)+'"}'
#We want to make sure that our timer is set at 5 second intervals. The AJAX script sets the keep alive variable to True
# --every 1 second. I've set it to 5 seconds here to account for any delays that might occur over the network.
# --Every 5 seconds, this script resets the keep_alive variable to 'False', if it is already False--that means the user exited
# --the process on their AJAX end so we should stop adding this to the database and delete what we've already done.
#print >>sys.stderr, str(time.clock()) + " - " + str(keepAliveTimer) + " : " + str(progressData.keep_alive)
if time.clock() - keepAliveTimer > 5:
print >> sys.stderr, str(time.clock() - keepAliveTimer) + " : We are at the 5 second interval! " + str(row_index)
#restart the keepAlive timer to the current time
keepAliveTimer = time.clock()
#delete the data if the user's AJAX end is unresponsive
if progressData.keep_alive == False:
print >> sys.stderr, "We are deleting our progress now--wish us luck!"
newFormType.delete()
progressData.delete()
try:
newFormTypeGroup.delete()
except:
#break from loop
break
#break from loop
break
else:
progressData.keep_alive = False
progressData.save()
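#As a minimal sketch of the keep-alive handshake above (illustrative only--this
#--helper is not called anywhere; 'progress_model' is assumed to be the same
#--AJAXRequestData instance used here, and the 5-second interval is the same
#--assumption used above):
def _watchdog_check(progress_model, last_check, now, interval=5):
    #Returns (new_last_check, should_abort). The client-side AJAX poller is
    #assumed to set keep_alive=True roughly every second; if a full interval
    #passes without the flag being re-raised, the client is considered gone.
    if now - last_check <= interval:
        return (last_check, False)
    if not progress_model.keep_alive:
        #no heartbeat since our last reset--abort and clean up
        return (now, True)
    #heartbeat seen--lower the flag and wait for the client to raise it again
    progress_model.keep_alive = False
    progress_model.save()
    return (now, False)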
#Now Update the hierarchical references if they exist
#This for loop will only run if the hierarchyDict has been appended to already
for key, value in hierarchyDict.iteritems():
formToModify = Form.objects.get(pk=key)
try:#Essentially we are trying to grab the form with the given form_name. If no match is found, the except clause leaves hierarchy_parent unset
formToModify.hierarchy_parent = Form.objects.all().filter(form_name=value)[0]
#print >> sys.stderr, "Admin: Line 681: WHAT'S The Name?: " + formToModify.hierarchy_parent
formToModify.save()
except:
print >>sys.stderr, "No Hierarchy Match found."
#Now Update the self references if they exist
#This for loop will only run if the selfReferenceList has been populated
for refValue in selfReferenceList:
#Remember that some external key references may be multi-value and comma separated, so let's try splitting them by comma
#--and looping through them appropriately
key_list = refValue.external_key_reference.split(',')
for aKey in key_list:
try:#Essentially we are trying to grab the form with the given external ID by form_name. If no match is found, the except clause just skips it
refValue.record_reference.add(Form.objects.all().filter(form_name=aKey)[0])
refValue.save()
except:
print >>sys.stderr, "No Ref Match found."
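#For reference, both post-passes above reduce to this pattern (a sketch under the
#--assumption that external keys are comma-separated form_name values, as they are
#--throughout this importer; this helper is illustrative and not called anywhere):
def _resolve_external_keys(ref_value, form_queryset):
    #Attach every form whose form_name appears in the comma-separated
    #external_key_reference; unmatched keys are left alone for a later pass
    for a_key in ref_value.external_key_reference.split(','):
        matches = form_queryset.filter(form_name=a_key)
        if matches.exists():
            ref_value.record_reference.add(matches[0])
    ref_value.save()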
print >> sys.stderr, "333 Just making sure things are working still....where's the stop point?"
#When we are finished, update the progressData to show that
progressData.jsonString = '{"row_index":"'+str(row_index)+'","is_complete":"True", "row_total":"'+post_data.get('row_total')+'"}'
progressData.is_complete = True
progressData.save()
return HttpResponse('{"MESSAGE":"Finished the import!"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 4 RUN_FORM_TYPE_FORM_IMPORTER()
#=======================================================#
def run_form_type_form_importer(self, request):
#******************************************#
ACCESS_LEVEL = 4
PROJECT = request.user.permissions.project
#******************************************#
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
# This API Endpoint takes an argument for a CSV file, HttpContext (e.g. context kwargs passed to the HttpResponse like pk values),
# -->and finally the POST data submitted by the formtype_form_importer view. It will match POST column header data customized by
# -->the user to columns in the CSV file and automatically generate a new Form of the existing FormType for each row of the
# -->CSV file with all the necessary RecordAttribute/ReferenceType's and Values
#
# *This function is for importing a separate CSV of "NEW" forms to an existing formtype with the same RTYPES in the header
# --if the RTYPE doesn't exist, it will cease the import and delete everything it made up to that point
# *It uses a CSV file that has been converted into JSON of key:value pairs and passed as a POST argument
# *This is done as an AJAX request to show progress of the database import
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
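#A hedged example of what the 'csv_json' POST argument described above is assumed
#--to decode to: a list of {header: cell} dictionaries, one per CSV row, e.g.
#
#   [{"Object No": "101", "Material": "ceramic", "Related Site": "12,14"},
#    {"Object No": "102", "Material": "lithic",  "Related Site": ""}]
#
#--(the header names and values here are made up for illustration)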
#We have the column headers saved in a coded format in the passed POST header argument 'post_data'
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
#
#
# *Where (n) is the associated key value for the original header for each row of CSV data, e.g. if n == "Object No", then the value in the csv
# *row under the key "Object No" is the one that will be matched
# record__(n)__name --> This represents the RecordAttribute/ReferenceType name field for the model
# record__(n)__reftype --> This represents the RecordReferenceType's referenced FormType, if applicable
# record__(n)__ismainID --> This is a fake Bool value. It either exists, which means this particular (n) column is to be used for the form_num/form_name field
# -->or it isn't added to the POST data because it wasn't selected and therefore does not exist, in which case this particular
# -->column is a regular record type rather than the form's main ID
# record__(n)__isreference --> This is a fake Bool value. It either exists, which means the particular (n) column is to be treated as a RecordReferenceType
# -->or it isn't added to the POST data because it wasn't selected and therefore does not exist, and therefore this particular column
# -->is a RecordAttributeType rather than a RecordReferenceType
#
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------
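#As a minimal sketch of how the coded headers above get interpreted (illustrative
#--only; this helper is not called anywhere, and 'post_data' is assumed to be
#--request.POST as used below):
def _classify_column(post_data, n):
    #Returns 'mainID', 'reference', or 'attribute' for CSV column key n,
    #mirroring the membership checks performed in the row loop below
    if ('record__' + str(n) + '__ismainID') in post_data:
        return 'mainID'
    if ('record__' + str(n) + '__isreference') in post_data:
        return 'reference'
    return 'attribute'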
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"row_index":"0","row_total":"0","is_complete":"False","row_timer":"0"}')
progressData.save()
#kwargs.update({'uuid':progressData.pk})
post_data = request.POST
#timerA = time.clock()
#print >>sys.stderr, "Starting Clock: " + str(timerA)
#Make sure we escape the newline characters from the json string--jscript didn't do it automatically when concatenating the rows together in the client-side script
#We also have to replace all \t 's in the json strings before loading them because JSON doesn't allow literal TABS --we need to escape them with a "\\"
#print >> sys.stderr, post_data.get('csv_json').encode('utf-8').replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n')
print >>sys.stderr, request.POST['csv_json'][0:1000]
#debug output disabled--the fixed offsets below raise an IndexError on any payload shorter than ~8MB
#print >>sys.stderr, request.POST['csv_json'][7932427:7934427] + " : " + request.POST['csv_json'][7933427]
csv_json = json.loads(post_data.get('csv_json').encode('utf-8').replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n'))
#response = HttpResponse(post_data.get('csv_json'), content_type='text/plain')
#response['Content-Disposition'] = 'attachment; filename="test__'+request.user.username+'.txt"'
#return response
#print >> sys.stderr, post_data
#setup Dictionaries for post import self-referential needs
#setup a dict for hierarchy value
hierarchyDict = {}
#setup a recordreferencevalue dictionary for the form type if a particular reference is self-referencing to this same form type
selfReferenceList = []
currentFormType = FormType.objects.get(pk=request.POST['formtype_pk'])
availableFRATs = currentFormType.formrecordattributetype_set.all()
availableFRRTs = currentFormType.ref_to_parent_formtype.all()
#queue them up in memory
print >>sys.stderr, len(availableFRATs)
print >>sys.stderr, len(availableFRRTs)
#Make a dictionary based with the RTYPE names and their objects as values
FRATs = {}
FRRTs = {}
for frat in availableFRATs:
FRATs[frat.record_type] = frat
for frrt in availableFRRTs:
FRRTs[frrt.record_type] = frrt
print >>sys.stderr, FRATs
# $$$SECURITY$$$ If the formtype requested is not part of this user's project, then exit immediately with a warning
if currentFormType.project.pk == PROJECT.pk:
#Let's keep track of all the forms we make to delete them all if anything goes wrong
allNewForms = []
#Each row in the CSV file represents a new 'Form' of the 'currentFormType'
#Let's make a 'row' counter to help with indexing through the CSV file
row_index = 0
#Let's make an incremental counter for record type orders
order_counter = 1
print >> sys.stderr, "Just making sure things are working still....where's the stop point?"
keepAliveTimer = time.clock()
try:
#For each row of the CSV
for row in csv_json:
row_index += 1
#print >> sys.stderr, "222 Just making sure things are working still....where's the stop point?"
timerBeginRow = time.clock()
#print >>sys.stderr, "Starting a new row: " + str(timerBeginRow)
#If we are past index '0' then let's continue with the rest of the importer
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ CREATE NEW FORM @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-----------------------------------------------------------------------------------------------------------
#Create a new Form and attach the existing 'FormType' to 'form_type' in the 'Form' model
newForm = Form()
newForm.form_type = currentFormType
newForm.project = PROJECT
newForm.is_public = False
#we will worry about adding the form_name / form_number later
#save the Form to give it a pk value in the database. Now we can use it for variable assignments later
newForm.save()
print >>sys.stderr, "NEW Form PK" + str(newForm.pk)
#Add this form to our form list to delete if anything goes wrong
allNewForms.append(newForm)
col_index = 0
#For each column in the CSV Row and the column headers (essentially all the dict/JSON key values)
#We setup a bool test to determine if we find a primary id that is selected or not.
foundAMainID = False
for key, value in row.iteritems():
col_index += 1
#timerJ = time.clock()
#print >>sys.stderr, "Starting col loop: " + str(key) + " : " + str(value)
#First check if this column is the unique ID for this form
#we'll see if it is by checking the POST_DATA if 'record__(n)__ismainID' exists
if 'record__'+str(key)+'__ismainID' in post_data:
print >>sys.stderr, "FOUND MAIN ID: " + key
#If it is, then add this column value to the current Form's "form_name"
foundAMainID = True
newForm.form_name = value
#save the Form
newForm.save()
#If it is not the ID field:
else:
#Now we need to check if there is a matching FRAT with the attribute header name
try:
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD ATTRIBUTE VALUE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------------------------------------------------------
#Create a new RecordAttributeValue
currentKey = request.POST['record__'+str(key)+'__name']
#print >>sys.stderr, currentKey
newFormRecordAttributeValue = FormRecordAttributeValue()
newFormRecordAttributeValue.project = PROJECT
newFormRecordAttributeValue.is_public = False
#set the "record_value" to the column value of the csv row
newFormRecordAttributeValue.record_value = value
#set the "form_parent" to the current row's Form
newFormRecordAttributeValue.form_parent = newForm
#set the "record_attribute_type" to the current RecordAttributeType
newFormRecordAttributeValue.record_attribute_type = FRATs[currentKey]
#save the RecordAttributeValue
newFormRecordAttributeValue.save()
#If there isn't a matching FRAT, then look for a matching FRRT with the same header label
except KeyError:
try:
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ADD A RECORD REFERENCE VALUE @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------------------------------------------------------
#Create a new RecordReferenceValue
newFormRecordReferenceValue = FormRecordReferenceValue()
newFormRecordReferenceValue.project = PROJECT
newFormRecordReferenceValue.is_public = False
#set the "external_key_reference" to the column value of the csv row
newFormRecordReferenceValue.external_key_reference = value
#set the "form_parent" to the current row's Form
newFormRecordReferenceValue.form_parent = newForm
#set the "record_reference_type" to the current RecordReferenceType
newFormRecordReferenceValue.record_reference_type = FRRTs[request.POST['record__'+str(key)+'__name']]
#store the references to other forms
possibleRefValues = newFormRecordReferenceValue.external_key_reference.split(",")
#save the value to give it a pk value
newFormRecordReferenceValue.save()
#if the current external ID value matches an existing form's "form_name"
#iterate over a copy--removing items from the list we're looping over would silently skip elements
for refValue in list(possibleRefValues):
possibleFormMatch = newFormRecordReferenceValue.record_reference_type.form_type_reference.form_set.filter(form_name=refValue)
if possibleFormMatch.exists():
#remove this value from future matches to ensure we don't double add it
possibleRefValues.remove(refValue)
#set the current FormRecordReferenceValue.record_reference to the current form in the loop iteration
#no need to resave because it is saved automatically with the add() command
newFormRecordReferenceValue.record_reference.add(possibleFormMatch[0])
#If neither a matching FRAT nor a matching FRRT exists for this header, log it and skip the column
#--(any hard failure propagates to the outer exception handler below, which deletes the forms created so far)
except KeyError:
print >>sys.stderr, "QUITTING"
col_index += 1
#Upload our progress data object with the current row
timerFinishRow = time.clock()
#print >>sys.stderr, "Ending a row: " + str(timerF) + " Time elapsed since row start = " + str(timerF-timerC)
#We need to update the progressData model because it is updated by another thread as well
#--Otherwise this will just ignore the 'keep_alive' flag and quit after 2 timer checks
#--I'm not entirely sold on this method--There's a slight....itty bitty...teensy weensy...chance that the other thread
#--might be trying to update the AJAX model at the exact time and will be missed here--but as of now, I can't think of a
#--better solution and I'm REALLY over working on this importer today.
progressData = AJAXRequestData.objects.get(pk=progressData.pk)
progressData.jsonString = '{"row_index":"'+str(row_index)+'","is_complete":"False","row_total":"'+post_data.get('row_total')+'","row_timer":"'+str(timerFinishRow-timerBeginRow)+'"}'
#We want to make sure that our timer is set at 5 second intervals. The AJAX script sets the keep alive variable to True
# --every 1 second. I've set it to 5 seconds here to account for any delays that might occur over the network.
# --Every 5 seconds, this script resets the keep_alive variable to 'False', if it is already False--that means the user exited
# --the process on their AJAX end so we should stop adding this to the database and delete what we've already done.
#print >>sys.stderr, str(time.clock()) + " - " + str(keepAliveTimer) + " : " + str(progressData.keep_alive)
if time.clock() - keepAliveTimer > 5:
print >> sys.stderr, str(time.clock() - keepAliveTimer) + " : We are at the 5 second interval! " + str(row_index)
#restart the keepAlive timer to the current time
keepAliveTimer = time.clock()
#delete the data if the user's AJAX end is unresponsive
if progressData.keep_alive == False:
print >> sys.stderr, "We are deleting our progress now--wish us luck!"
#This needs to delete ONLY the forms that have been created. Every form created should be added to a temporary list
# --for this to access and delete
for thisForm in allNewForms:
thisForm.delete()
progressData.delete()
#break from loop
ERROR_MESSAGE += "Client unresponsive, shutting down import."
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
else:
progressData.keep_alive = False
progressData.save()
#print >>sys.stderr, "End of col loop: " + str(timerK) + " Time elapsed = " + str(timerK-timerJ)
#If we didn't find a primary key for this row/form, then add the row index as the incremental form name/number
if foundAMainID == False:
print "using alt ID Number"
newForm.form_name = str(row_index+1)
newForm.save()
foundAMainID = False
print >>sys.stderr, "NEW Form Name" + str(newForm.form_name)
#When we are finished, update the progressData to show that
progressData.jsonString = '{"row_index":"'+str(row_index)+'","is_complete":"True", "row_total":"'+post_data.get('row_total')+'"}'
progressData.is_complete = True
progressData.save()
return HttpResponse('{"MESSAGE":"Finished the import!"}',content_type="application/json")
except Exception as e:
print >>sys.stderr, "Something happened!!" + str(type(e)) + " : " + str(e.args) + " : " + str(e)
#Delete all forms in the allFormsList
for thisForm in allNewForms:
thisForm.delete()
print >>sys.stderr, "Something happened!!" + str(type(e)) + " : " + str(e.args) + " : " + str(e)
#create the error message
ERROR_MESSAGE += "Error: Something went wrong. Your action has been logged and sent to the admin" + str(type(e)) + " : " + str(e.args) + " : " + str(e)
else: ERROR_MESSAGE += "Error: You do not have permission to access this form type from a different project. Your action has been logged and sent to the admin"
else: ERROR_MESSAGE += "Error: You do not have permission to access this tool. Your action has been logged and sent to the admin"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 2 BULK_EDIT_FORMTYPE()
#=======================================================#
def bulk_edit_formtype(self, request):
#***************#
ACCESS_LEVEL = 2
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This Endpoint works in the formtype viewer--it receives a list of edits based on the form query and processes those edits
# --in bulk. E.g. you can edit the rtype of multiple forms, compared to one at a time in an individual form editor
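#A hedged example of the POST body this endpoint expects (key names inferred from
#--the parsing below; the pks and values are made up for illustration):
#
#   frav__12 : "Painted ware"   --> updates FormRecordAttributeValue pk=12
#   frrv__7  : ["31", "44"]     --> replaces the references on FormRecordReferenceValue pk=7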
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
try:
print >> sys.stderr, request.POST
#This will receive post data containing a series of FRAV or FRRVs that need to be edited
#Just an extra bit of security to ensure this only processes POST data
if request.method == 'POST':
counter = 0
print >> sys.stderr, request.POST
for key in request.POST:
print >>sys.stderr, key
splitkey = key.split('__')
if len(splitkey) > 1:
if splitkey[0] == 'frav':
currentFRAV = FormRecordAttributeValue.objects.get(pk=splitkey[1])
currentFRAV.record_value = request.POST[key]
#Add the user information
currentFRAV.modified_by = request.user
currentFRAV.save()
else:
#Otherwise this key codes a FormRecordReferenceValue edit
currentFRRV = FormRecordReferenceValue.objects.get(pk=splitkey[1])
#rebuild the external key string from the submitted references
new_external_key = ""
#Empty our list of references, and then add them all new here
currentFRRV.record_reference.clear()
for reference in request.POST.getlist(key):
#make sure we add a null check here--the user might not have chosen a referenced form
if reference != '' and reference != None:
currentFRRV.record_reference.add(Form.objects.get(pk=reference))
new_external_key += str(reference) + ","
#remove the trailing comma and store the rebuilt key
currentFRRV.external_key_reference = new_external_key[:-1]
#Add the user information and save the reference value
currentFRRV.modified_by = request.user
currentFRRV.save()
counter += 1
return HttpResponse('{"message":"Succesfully updated:'+ str(counter) +' field(s) in the database"}', content_type="application/json")
except Exception as e:
ERROR_MESSAGE += '"Something happened and the fields did not update in the database. See Error| '+str(e)+'"'
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 MODIFY_PROJECT_USER()
#=======================================================#
def modify_project_user(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
# :::This function is an admin API Endpoint that accepts json data in POST (and ONLY post) and returns a string of JSON through AJAX
#
# !!!!!! It is ESSENTIAL that we create tight security here.!!!!!!!
# -----------------------------------------------------------------
# This view HAS to make sure that ONLY users with proper
# --access rights can manipulate user accounts. Because User accounts and their OneToOne Permission Model
# --control access, only project 'Admins' or (level 5) can actually edit users and create new ones.
#
# Because Django requires high-level permissions on all of its users to access admin functions, I had to implement
# --another layer of control. This should work perfectly fine and secure. Essentially, ANY user outside a 'Master Admin'
# --can ONLY edit members of their own project. This view handles that by automatically forcing this new user to be part
# --of the project of the current user's session.
#
# Additionally, if the user doesn't have the correct access level of 5 to do this action, nothing will happen and it will
# --return an error explaining what occurred. This SHOULDN'T happen--because the javascript allowing this is only installed
# --on the client IF they already have the permission level--HOWEVER--if this jscript is downloaded off the GIT or some other
# --source and inserted into the page (which should only happen if they already HAVE access to some project on this database)--this
# --ensures that no attack is possible.
#
# Finally, SQL injection should be a non-issue here--I do not allow any raw() SQL to be used in any form to date--so any insertions
# --should be automatically cleaned by Django's built-in ORM functions
#-------------------------------------------------------------------------------------------------------------------------------------
# POST json will contain a list of 'users' that contain several keys
# JSON KEYS : "is_new_user" , "username" , "password" , "access_level", "name" , "title", "email"
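#A hedged example of the 'user_change_list' payload (the key names are the ones
#--listed above; the field values are made up for illustration):
#
#   {"userlist": [
#       {"is_new_user":"T", "username":"jdoe", "password":"secret",
#        "access_level":"2", "name":"Jane Doe", "title":"Staff", "email":"jdoe@example.org"},
#       {"is_new_user":"DELETE", "user_id":"14", "username":"olduser", "password":"x",
#        "access_level":"1", "name":"", "title":"", "email":""}
#   ]}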
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
#Make sure we have the right key in the POST data
if 'user_change_list' in request.POST:
#Let's grab our json Data and convert it to a python Dictionary
userJSON = json.loads(request.POST['user_change_list'])
print >>sys.stderr, userJSON
PROGRESS_MESSAGE = ""
DELETE_KEYS = ""
#Now loop through each 'user' in the dictionary and make the edits, or create/delete them
for aUser in userJSON['userlist']:
#We also now need to make sure that there are the bare minimum of keys needed (username, pass, access_level, and the edit/create flag)
if 'is_new_user' in aUser and 'username' in aUser and 'password' in aUser and 'access_level' in aUser:
#NOW *sigh of exhaustion* let's make sure that the user/pass/access_level isn't blank
# --We have to do this, because if someone hacks the jscript--they can force submit a blank input.
# --This shouldn't have deleterious side-effects--but we're not playing around anyway!
if aUser['is_new_user'] != "" and aUser['username'] != "" and aUser['password'] != "" and aUser['access_level'] != "":
#OKAY! We are all set to create/edit a user
#----CREATING A NEW USER -------------------------------------------------------------
if aUser['is_new_user'] == "T":
#We need to make sure there isn't already a username in the database with the submitted name
if User.objects.all().filter(username=aUser['username']).exists() != True:
newUser = User.objects.create_user(username=aUser['username'],password=aUser['password'])
#ADD ALL STATIC INFORMATION
newUser.is_staff = True
newUser.is_active = True
#newUser.save()
#ADD USER SUBMITTED INFORMATION
#--SECURITY NEEDS: Make sure to ONLY use the project from the user's own Session data that's already been authorized
#--Also make sure the access level is set, and MAKE sure the access_level is an Integer and not a string
isInt = True
try:
newUser.permissions.access_level = int(aUser['access_level'])
except Exception as inst:
isInt = False
if isInt:
newUser.permissions.project = request.user.permissions.project
newUser.permissions.title = aUser['title']
newUser.email = aUser['email']
#figure out names--if there's more than one space first in list is first name--rest is last name
splitName = aUser['name'].split(' ')
newUser.first_name = splitName[0]
lastName = ""
if len(splitName) > 1:
#start at index 1--we don't need the first name; join the rest with spaces
for i in range(1, len(splitName)):
lastName += " " + splitName[i]
newUser.last_name = lastName.strip()
#If all goes well, save the new User to the database
newUser.save()
PROGRESS_MESSAGE += " Made a new user: " + newUser.username + " --- "
else:
#Delete the user and add an error message
newUser.delete()
ERROR_MESSAGE += " Uh Oh! Something happened with: the access level submitted when creating a new user!" + str(inst) +" --You probably tried submitting a non-int for an integer access level?"
else:
ERROR_MESSAGE += "That username already exists!"
#----EDITING AN EXISTING USER -------------------------------------------------------------
elif aUser['is_new_user'] == "F":
#--SECURITY NEEDS: We have to be mindful here of how access is given to PK lookups, e.g. a user
# --might have injected a different user PK than is part of this project. We'll filter by the
# --user's own Project PK to ensure ONLY User PKs attached this project can be modified
# --This also ensures no SQL injection can be performed
userToEdit = Permissions.objects.all().filter(user__pk=aUser['user_id'], project__pk = request.user.permissions.project.pk)[0].user
#We can only modify a small subset of the user's fields
isInt = True
try:
userToEdit.permissions.access_level = int(aUser['access_level'])
except:
isInt = False
if isInt:
#First try and edit the user's name--if it's the same as the current name than skip, and if it's different make sure it's not taken
if userToEdit.username != aUser['username']:
if User.objects.all().filter(username=aUser['username']).exists() == False:
userToEdit.username = aUser['username']
else:
#Just give a simple ERROR MESSAGE
ERROR_MESSAGE += " There was a problem with " + userToEdit + "'s username change. The name: "+ aUser['username'] +" already exists in the database! Try choosing a new one"
userToEdit.permissions.title = aUser['title']
userToEdit.email = aUser['email']
#figure out names--if there's more than one space first in list is first name--rest is last name
splitName = aUser['name'].split(' ')
if len(splitName) > 0:
userToEdit.first_name = splitName[0]
lastName = ""
#start at index 1--we don't need the first name
for i in range(1, len(splitName)):
lastName += " " + splitName[i]
userToEdit.last_name = lastName.strip()
else:
userToEdit.first_name = aUser['name']
userToEdit.last_name = ""
#If all goes well, save the new User to the database
userToEdit.save()
PROGRESS_MESSAGE += " Edited a user: " + userToEdit.username + " --- "
else:
ERROR_MESSAGE += " Uh Oh! Something happened with: the access level submitted when editing a new user" + " --You probably tried submitting a non-int for an integer access level?"
#----DELETING AN EXISTING USER -------------------------------------------------------------
elif aUser['is_new_user'] == 'DELETE':
#--SECURITY NEEDS: We have to be mindful here of how access is given to PK lookups, e.g. a user
# --might have injected a different user PK than is part of this project. We'll filter by the
# --user's own Project PK to ensure ONLY User PKs attached this project can be modified
# --This also ensures no SQL injection can be performed
userToDelete = Permissions.objects.all().filter(user__pk=aUser['user_id'], project__pk = request.user.permissions.project.pk)[0].user
print >>sys.stderr, str(request.user.permissions.project.pk) + " --- " + str(aUser['user_id'])
print >>sys.stderr, userToDelete
#userToDelete = userToDelete[0].user
#userToDelete = request.user.permissions.project.permissions_set.all().filter(user__pk = aUser['user_id'])[0].user
print >>sys.stderr, userToDelete.username + " : " + str(userToDelete.permissions.project)
PROGRESS_MESSAGE += " DELETED a user: " + userToDelete.username + " --- "
DELETE_KEYS+= '"DELETED_'+aUser['user_id']+'":"'+ aUser['user_id'] +'",'
userToDelete.delete()
else:
ERROR_MESSAGE += "Error: "+ aUser['username'] +" : is_edit="+ aUser['is_new_user']+" : Hmm--We can't figure out if you're editing or creating a user, something may have happened to the POST data. You didn't try and hack it did you?"
else:
ERROR_MESSAGE += "Error: You are missing required fields that seem to be blank"
else:
ERROR_MESSAGE += "Error: You are missing required json keys to continue"
#Remove the trailing comma from our DELETE_KEYS if they exist
if len(DELETE_KEYS) > 0:
DELETE_KEYS = DELETE_KEYS[:-1]
DELETE_KEYS = "," + DELETE_KEYS
if ERROR_MESSAGE == "":
#Because user objects do not have a last modified/date modified field, we will log each time these occur to the log files in case of any issues that arise
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), request.user.username + ': has made user changes --: ' + PROGRESS_MESSAGE, request.META)
#Now return a successful JSON response back to the request, if we successfully navigated ALL users
return HttpResponse('{"Message":"Successful! '+ PROGRESS_MESSAGE +'"'+ DELETE_KEYS+ '}', content_type="application/json")
else:
#Because user objects do not have a last modified/date modified field, we will log each time these occur to the log files in case of any issues that arise
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), request.user.username + ': has made user changes --: ' + PROGRESS_MESSAGE, request.META)
#Return a semi-successful JSON response--It may have added some users, but there may have been errors too
return HttpResponse('{"Message":"Successful!--but with errors =( '+ PROGRESS_MESSAGE + ' !!!! ' + ERROR_MESSAGE +' "}', content_type="application/json")
ERROR_MESSAGE += "Error: You are missing required information in the POST header to create a new User for your project."
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 2 SAVE_FORM_CHANGES()
#=======================================================#
def save_form_changes(self, request):
#***************#
ACCESS_LEVEL = 2
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
#:::This function edits a form. In order to maintain integrity when editing a form--we need to assume
# --the worst. In this case someone may be attempting to pass a different pk into this endpoint and edit a new form there.
# --this isn't terribly problematic in terms of security--if someone can access this function then they can edit any form in
# --their project. We just need to make sure they can ONLY affect forms in their own project. Performing a simple check on the
# --form parent pks the submitted RTYPES are attached to should be enough to deter these shenanigans--but once again--the worst someone can
# --do if hijacking this endpoint is add/change new data. They can't delete anything.
#------------------------------------------------------------------------------------------------------------------------------------
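#The project-scope check described above is repeated for every object touched in
#--this endpoint; as a sketch it is nothing more than the following (assuming, as
#--everywhere in this file, that the model carries a 'project' foreign key; this
#--helper is illustrative and not called anywhere):
def _in_project_space(obj, request):
    #True only if the object belongs to the same project as the session user
    return obj.project.pk == request.user.permissions.project.pk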
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
print >>sys.stderr, request.POST
post_data = request.POST
#$$$ SECURITY $$$ Make sure we can ONLY access this form for editing if it is within the user's project space
formToEdit = Form.objects.get(pk=post_data['form_id'])
if formToEdit.project.pk == request.user.permissions.project.pk:
form_type = formToEdit.form_type
#Update the form's basic attributes
#Figure out if the input field is a number(int) or a string label
#*I'm not particularly fond of using try/except to control logic, but it is the
#"pythonic" EAFP idiom, and the Python core libraries use it often
try:
formToEdit.form_name = post_data.get('form_number')
formToEdit.form_number = int(post_data.get('form_number'))
except:
formToEdit.form_name = post_data.get('form_number')
formToEdit.form_number = None
formToEdit.form_geojson_string = post_data.get('form_geojson_string')
#Update the hierarchical parent reference if relevant
if form_type.is_hierarchical:
if post_data.get('hierarchical_reference') == 'NONE':
formToEdit.hierarchy_parent = None
else:
formToEdit.hierarchy_parent = Form.objects.get(pk=post_data.get('hierarchical_reference'))
for key in post_data:
splitKey = key.split("__")
#Update all of the form's FormRecordReferenceTypes
if len(splitKey) > 1:
if len(splitKey) == 2:
code,type_pk = splitKey
print >> sys.stderr, "Getting Close: " + code + " : " + type_pk
#Update all of the form's FormRecordAttributeValues
if code == "frav":
currentValue = FormRecordAttributeValue.objects.get(pk=type_pk)
# $$$ SECURITY $$$: Before we make any changes, we need to make sure we are editing a record value that has
# --the same project parent as the user. The user could inject pks from other projects into this and randomly
# --attack data.
if currentValue.project.pk == request.user.permissions.project.pk:
currentValue.record_value = post_data[key]
#Add the user information
currentValue.modified_by = request.user
currentValue.save()
else:
ERROR_MESSAGE += "You have attempted to edit a form with an attribute record type that is not part of your project space."
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#If we're editing this particular reference
elif code == "frrv":
currentReferenceValue = FormRecordReferenceValue.objects.get(pk=type_pk)
# $$$ SECURITY $$$: Before we make any changes, we need to make sure we are editing a record value that has
# --the same project parent as the user. The user could inject pks from other projects into this and randomly
# --attack data.
if currentReferenceValue.project.pk == request.user.permissions.project.pk:
#first clear the manytomany field
currentReferenceValue.record_reference.clear()
#loop through all available selections and add them to the manytomany field
for reference in post_data.getlist(key):
print >> sys.stderr, reference + " <!----- ADDING THIS REF"
#make sure we add a null check here--the user might not have chosen a referenced form
if reference != '':
currentReferenceValue.record_reference.add(Form.objects.get(pk=reference))
print >> sys.stderr, str(currentReferenceValue.record_reference) + " <!----- ADDED THIS REF"
#Add the user information
currentReferenceValue.modified_by = request.user
if ('frrv__'+type_pk+'__ext') in request.POST:
currentReferenceValue.external_key_reference = request.POST['frrv__'+type_pk+'__ext']
else:
currentReferenceValue.external_key_reference = request.POST[key]
#save the reference value
currentReferenceValue.save()
else:
ERROR_MESSAGE += "You have attempted to edit a form with a reference record type that is not part of your project space."
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
elif code == "frrvNEW":
#If there is a 'new' FRRV needed, that means the formtype was created manually and not through the csv importer. This is fine, we just
#need to make a new one now and add the necessary attributes to it.
newFRRV = FormRecordReferenceValue()
newFRRV.project = request.user.permissions.project
newFRRV.created_by = request.user
newFRRV.modified_by = request.user
newFRRV.record_reference_type = FormRecordReferenceType.objects.get(pk=type_pk)
newFRRV.form_parent = formToEdit
if ('frrvNEW__'+type_pk+'__ext') in request.POST:
newFRRV.external_key_reference = request.POST['frrvNEW__'+type_pk+'__ext']
else:
newFRRV.external_key_reference = request.POST[key]
#We have to save the new FRRV to the database before adding ManyToMany references--the instance needs a pk first
newFRRV.save()
for reference in post_data.getlist(key):
print >> sys.stderr, reference + " <!----- ADDING THIS REF"
#make sure we add a null check here--the user might not have chosen a referenced form
if reference != '':
newFRRV.record_reference.add(Form.objects.get(pk=reference))
print >> sys.stderr, str(newFRRV.record_reference) + " <!----- ADDED THIS REF"
newFRRV.save()
#If we're dealing with a NEW FRAV -- it will be coded as 'frat' instead of 'frav'
elif code == "frat":
currentFRAT = FormRecordAttributeType.objects.get(pk=type_pk)
#$$$ SECURITY $$$ -- We need make sure they are trying to add rtype values that are attached to this project
#if they are not, then show an error page and delete this current form.
if currentFRAT.project.pk == request.user.permissions.project.pk:
newformrecordattributevalue = FormRecordAttributeValue(record_value = post_data[key])
newformrecordattributevalue.form_parent=formToEdit
newformrecordattributevalue.record_attribute_type=currentFRAT
#Add the user information - We only set created by in endpoints that create the model for the first time
newformrecordattributevalue.created_by = request.user
newformrecordattributevalue.modified_by = request.user
newformrecordattributevalue.save()
else:
ERROR_MESSAGE += "You have attempted to add a form with a attribute record type that is not part of your project space."
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
# If we've managed to get this far, then save the form changes. Otherwise some error occurred and nothing should be saved
# --in order to maintain database integrity -- this will still not affect individual values--but it will stop some things from changing.
#Add the user information
formToEdit.modified_by = request.user
formToEdit.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
ERROR_MESSAGE += "Error: You are attempting to access a form outside your project space!"
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 2 CREATE_NEW_FORM()
#=======================================================#
def create_new_form(self, request):
#***************#
ACCESS_LEVEL = 2
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
#:::This function creates a new form of the given form type. In order to maintain integrity when creating a new form--we need to assume
# --the worst. In this case someone may be attempting to pass a different form-type pk into this endpoint and create a new form there.
# --this isn't terribly problematic in terms of security--if someone can access this function then they can edit any form type in
# --their project. We just need to make sure they can ONLY affect form types in their own project. Performing a simple check on the
# --form_type the submitted RTYPES are attached to should be enough to deter these shenanigans--but once again--the worst someone can
# --do if hijacking this endpoint is add new data. They can't delete anything.
#------------------------------------------------------------------------------------------------------------------------------------
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
#Create New Form with formtype
post_data = request.POST
form_type = FormType.objects.get(pk=post_data['formtype_id'])
#Make sure we're in the user's allowed project space
if form_type.project.pk == request.user.permissions.project.pk:
addedFRAT_pks = {}
addedFRRT_pks = {}
newform = Form(form_name='', form_number=post_data.get('form_number'), form_geojson_string=post_data.get('form_geojson_string'))
newform.form_type=form_type
#Add the user information - We only set created by in endpoints that create the model for the first time
newform.created_by = request.user
newform.modified_by = request.user
#Figure out if the input field is a number(int) or a string label
#*I'm not particularly fond of using try/except to control logic, but it is the
#"pythonic" EAFP idiom, and the Python core libraries use it often
try:
newform.form_name = post_data.get('form_number')
newform.form_number = int(post_data.get('form_number'))
except:
newform.form_name = post_data.get('form_number')
newform.form_number = None
#Update the hierarchical parent reference if relevant
if form_type.is_hierarchical:
if post_data.get('hierarchical_reference') == 'NONE':
newform.hierarchy_parent = None
else:
newform.hierarchy_parent = Form.objects.get(pk=post_data.get('hierarchical_reference'))
#save the form
newform.save()
print >> sys.stderr, request.POST
#Now we need to create all the attributes from the form input
for key in post_data:
splitKey = key.split("__")
if len(splitKey) > 1:
if len(splitKey) == 2:
code,type_pk = splitKey
print >> sys.stderr, "Getting Close: " + code + " : " + type_pk
if code == "frat":
currentFRAT = FormRecordAttributeType.objects.get(pk=type_pk)
#$$$ SECURITY $$$ -- We need make sure they are trying to add rtype values that are attached to this project
#if they are not, then show an error page and delete this current form.
if currentFRAT.project.pk == request.user.permissions.project.pk:
newformrecordattributevalue = FormRecordAttributeValue(record_value = post_data[key])
newformrecordattributevalue.form_parent=newform
newformrecordattributevalue.record_attribute_type=currentFRAT
#Add the user information - We only set created by in endpoints that create the model for the first time
newformrecordattributevalue.created_by = request.user
newformrecordattributevalue.modified_by = request.user
newformrecordattributevalue.save()
addedFRAT_pks[currentFRAT.pk] = "Added"
else:
newform.delete()
ERROR_MESSAGE += "You have attempted to add a form with a attribute record type that is not part of your project space."
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#Create all references from the form input
elif code == "frrvNEW":
currentFRRT = FormRecordReferenceType.objects.get(pk=type_pk)
#$$$ SECURITY $$$ -- We need make sure they are trying to add rtype values that are attached to this project
#if they are not, then show an error page and delete this current form.
if currentFRRT.project.pk == request.user.permissions.project.pk:
newFRRV = FormRecordReferenceValue()
newFRRV.project = request.user.permissions.project
newFRRV.created_by = request.user
newFRRV.modified_by = request.user
newFRRV.record_reference_type = currentFRRT
newFRRV.form_parent = newform
newFRRV.external_key_reference = request.POST['frrvNEW__'+type_pk+'__ext']
#We have to save the new FRRV to the database before adding ManyToMany references--the instance needs a pk first
newFRRV.save()
for reference in post_data.getlist(key):
print >> sys.stderr, reference + " <!----- ADDING THIS REF"
#make sure we add a null check here--the user might not have chosen a referenced form
if reference != '':
newFRRV.record_reference.add(Form.objects.get(pk=reference))
print >> sys.stderr, str(newFRRV.record_reference) + " <!----- ADDED THIS REF"
newFRRV.save()
addedFRRT_pks[currentFRRT.pk] = "Added"
else:
newform.delete()
ERROR_MESSAGE += "You have attempted to add a form with a reference record type that is not part of your project space."
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#Now we're going to make empty FRAVs and FRRVs for all the remaining record types attached to the form type
allFRATs = form_type.formrecordattributetype_set.all()
allFRRTs = form_type.ref_to_parent_formtype.all()
print >>sys.stderr, addedFRAT_pks
print >>sys.stderr, addedFRRT_pks
#First make all necessary empty FRATS
if allFRATs:
for FRAT in allFRATs:
if FRAT.pk in addedFRAT_pks:
print >>sys.stderr, "YEP"
else:
#Make a new FRAV for this FRAT
newFRAV = FormRecordAttributeValue(record_value = "")
newFRAV.form_parent=newform
newFRAV.record_attribute_type=FRAT
#Add the user information - We only set created by in endpoints that create the model for the first time
newFRAV.created_by = request.user
newFRAV.modified_by = request.user
newFRAV.save()
if allFRRTs:
for FRRT in allFRRTs:
if FRRT.pk in addedFRRT_pks:
print >>sys.stderr, "YEP"
else:
#Make a new FRRV for this FRRT
newFRRV = FormRecordReferenceValue()
newFRRV.project = request.user.permissions.project
newFRRV.created_by = request.user
newFRRV.modified_by = request.user
newFRRV.record_reference_type = FRRT
newFRRV.form_parent = newform
newFRRV.external_key_reference = ""
#We have to save the new FRRV to the database before adding ManyToMany references--the instance needs a pk first
newFRRV.save()
#SUCCESS!!
return HttpResponse('{"MESSAGE":"SUCCESS!"}',content_type="application/json")
ERROR_MESSAGE += "Error: You do not have permission to accesss this project."
else: ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 SAVE_USER_QUERY() *RECYCLING
#=======================================================#
def save_user_query(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#We need to return a json list of all formtype RTYPES that match the provided formtype pk
if request.method == "POST":
#We only add queries to the user and nothing else
currentQueries = request.user.permissions.saved_queries
print >>sys.stderr, currentQueries
if currentQueries != "" and currentQueries != None:
currentQuery = json.loads(currentQueries)
currentQuery[request.POST['new_query_label']] = request.POST['new_query']
finishedQueryList = json.dumps(currentQuery)
request.user.permissions.saved_queries = finishedQueryList
request.user.permissions.save()
return HttpResponse(finishedQueryList, content_type="application/json" )
else:
newQuery = {}
newQuery[request.POST['new_query_label']] = request.POST['new_query']
newQuery = json.dumps(newQuery)
request.user.permissions.saved_queries = newQuery
request.user.permissions.save()
return HttpResponse(newQuery, content_type="application/json" )
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#-------------------------------------------------------------------------------------------------------
# MODEL QUERY ENDPOINTS
#=======================================================#
# ACCESS LEVEL : 1 GET_PROJECTS() *RECYCLING
#=======================================================#
def get_projects(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This Endpoint returns a list of all projects visible to the user. This is used mainly by the query engine
# --to figure out which projects can be chosen before selecting formtypes and rtypes to search by.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#We need to return a json list of all formtype RTYPES that match the provided formtype pk
if request.method == "POST":
#let's get all the public projects, which may not include our own, so let's redundantly merge in our own project and then call distinct()
publicProjects = FormProject.objects.filter(is_public=True)
userProject = FormProject.objects.filter(pk=request.user.permissions.project.pk)
if publicProjects.exists():
finalProjects = (publicProjects |userProject).distinct()
else:
finalProjects = userProject
finalJSON = {}
project_list = []
for aProject in finalProjects:
project_list.append({"name":aProject.name, "pk":aProject.pk})
finalJSON['project_list'] = project_list
finalJSON = json.dumps(finalJSON)
return HttpResponse(finalJSON, content_type="application/json" )
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_FORMTYPES() *RECYCLING
#=======================================================#
def get_formtypes(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This Endpoint returns a list of all formtypes for a provided project pk. This is used mainly by the query engine
# --to figure out which formtypes to add to a dropdown select by when a project is chosen.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#We need to return a json list of all formtype RTYPES that match the provided formtype pk
if request.method == "POST":
#Let's get all available public formtypes not in recycling--unless the formtypes are from the user's current project.
#If it is the user's current project, then don't use an is_public filter
print >>sys.stderr, request.POST['project_pk'] + " : "
if str(request.user.permissions.project.pk) == request.POST['project_pk']:
print >>sys.stderr, "What...?" + str(request.user.permissions.project.pk)
allFormTypes = FormType.objects.filter(project__pk=request.POST['project_pk'], flagged_for_deletion=False)
else:
allFormTypes = FormType.objects.filter(is_public=True, project__pk=request.POST['project_pk'], flagged_for_deletion=False)
if allFormTypes:
finalJSON = {}
formtype_list = []
for aFormType in allFormTypes:
formtype_list.append({"name":aFormType.form_type_name, "pk":aFormType.pk})
finalJSON['formtype_list'] = formtype_list
finalJSON = json.dumps(finalJSON)
return HttpResponse(finalJSON, content_type="application/json" )
else: ERROR_MESSAGE += "Error: no form types were found for this project"
else: ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_FORMTYPE_GEOSPATIAL_LAYERS() *RECYCLING
#=======================================================#
def get_formtype_geospatial_layers(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
		# This Endpoint returns a list of geoJSON 'geometry' layers to add to an OpenLayers map
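		#
		# Illustrative sketch (assumed example names/values): the response is a standard GeoJSON FeatureCollection, e.g.
		#   {"type": "FeatureCollection", "name": "Excavation Unit", "features": [
		#     {"type": "Feature", "properties": {"Object ID": "EU-001"}, "geometry": {"type": "Point", "coordinates": [44.1, 36.2]}}]}
		# --each form contributes one feature whose properties are its FRAT record values and whose geometry
		# --comes straight from form_geojson_string, so OpenLayers can consume it without further massaging.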
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == "POST":
				print >>sys.stderr, "Requested formtype_pk: " + request.POST['formtype_pk']
currentFormType = FormType.objects.get(pk=request.POST['formtype_pk'])
if request.user.permissions.project.pk == currentFormType.project.pk:
#geometry needs to be stored as a list of 'features'
allGeometry = {}
allGeometry['type'] = "FeatureCollection"
allGeometry['name'] = currentFormType.form_type_name
#allGeometry['crs'] = json.loads('{ "type": "name", "properties": { "name": "urn:ogc:def:crs:EPSG::32638" } }')
featureList = []
allGeometry['features'] = featureList
allForms = currentFormType.form_set.all()
if allForms:
for aForm in allForms:
properties = {}
							allFRATs = aForm.form_type.formrecordattributetype_set.all()
							if allFRATs:
								for FRAT in allFRATs:
									#Guard against missing FRAVs--manually created forms don't always have a value for every FRAT
									fravQuery = FormRecordAttributeValue.objects.filter(record_attribute_type=FRAT, form_parent=aForm)
									if fravQuery.exists(): properties[FRAT.record_type] = fravQuery[0].record_value
feature = {}
feature['properties'] = properties
feature['type'] = "Feature"
feature['geometry'] = json.loads(aForm.form_geojson_string)
print >>sys.stderr, "Loaded Timer"
featureList.append(feature)
allGeometry = json.dumps(allGeometry)
return HttpResponse(allGeometry,content_type="application/json")
else: ERROR_MESSAGE += "You do not have permission to access this form type from another project"
else: ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_RTYPES *RECYCLING
#=======================================================#
def get_rtypes(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
		# This Endpoint returns a list of all rtypes for a provided formtype pk. This is used mainly by the query engine
		# --to figure out which rtypes to add to a dropdown select when a formtype is chosen.
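		#
		# Illustrative sketch (assumed example names/values): each entry in the returned rtype_list looks like
		#   {"label": "Object ID", "pk": 4, "rtype": "FRAT"} or
		#   {"label": "Photo Ref", "pk": 9, "rtype": "FRRT", "ref_formtype_pk": 12}
		# --FRRT entries carry the referenced formtype's pk (or the string "None") so the query UI can chain lookups.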
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#We need to return a json list of all formtype RTYPES that match the provided formtype pk
if request.method == "POST":
#Grab the formtype
currentFormType = FormType.objects.get(pk=request.POST['formtype_pk'])
				#If the requested formtype doesn't belong to the user's project and is flagged for deletion or not public, stop the request
if currentFormType.project.pk != request.user.permissions.project.pk and (currentFormType.flagged_for_deletion == True or currentFormType.is_public == False):
ERROR_MESSAGE += "Error: You are attempting to access records that don't exist. This probably occurred because your client attempted altering the POST data before sending"
#Otherwise we are in the clear so grab the list and return it
else:
finalJSON = {}
rtypeList = []
					#Don't apply the is_public filter if this formtype belongs to the user's project--if it doesn't, then absolutely apply the is_public flags
if currentFormType.project.pk == request.user.permissions.project.pk:
#***RECYCLING BIN*** Make sure that the returned FRAT AND FRRTS are filtered by their deletion flags. Don't want them returned in the query
for FRAT in currentFormType.formrecordattributetype_set.all().filter(flagged_for_deletion=False):
currentRTYPE = {}
currentRTYPE['label'] = FRAT.record_type
currentRTYPE['pk'] = FRAT.pk
currentRTYPE['rtype'] = 'FRAT'
rtypeList.append(currentRTYPE)
#***RECYCLING BIN*** Make sure that the returned FRAT AND FRRTS are filtered by their deletion flags. Don't want them returned in the query
for FRRT in currentFormType.ref_to_parent_formtype.all().filter(flagged_for_deletion=False):
currentRTYPE = {}
currentRTYPE['label'] = FRRT.record_type
currentRTYPE['pk'] = FRRT.pk
if FRRT.form_type_reference: currentRTYPE['ref_formtype_pk'] = FRRT.form_type_reference.pk
else: currentRTYPE['ref_formtype_pk'] = "None"
currentRTYPE['rtype'] = 'FRRT'
rtypeList.append(currentRTYPE)
else:
#***RECYCLING BIN*** Make sure that the returned FRAT AND FRRTS are filtered by their deletion flags. Don't want them returned in the query
for FRAT in currentFormType.formrecordattributetype_set.all().filter(flagged_for_deletion=False, is_public=True):
currentRTYPE = {}
currentRTYPE['label'] = FRAT.record_type
currentRTYPE['pk'] = FRAT.pk
currentRTYPE['rtype'] = 'FRAT'
rtypeList.append(currentRTYPE)
#***RECYCLING BIN*** Make sure that the returned FRAT AND FRRTS are filtered by their deletion flags. Don't want them returned in the query
for FRRT in currentFormType.ref_to_parent_formtype.all().filter(flagged_for_deletion=False, is_public=True):
currentRTYPE = {}
currentRTYPE['label'] = FRRT.record_type
currentRTYPE['pk'] = FRRT.pk
if FRRT.form_type_reference: currentRTYPE['ref_formtype_pk'] = FRRT.form_type_reference.pk
else: currentRTYPE['ref_formtype_pk'] = "None"
currentRTYPE['rtype'] = 'FRRT'
rtypeList.append(currentRTYPE)
#sort our rtype list by the label
rtypeList = sorted(rtypeList, key=lambda k: k['label'])
#Return the JSON response
finalJSON['rtype_list'] = rtypeList
finalJSON = json.dumps(finalJSON)
return HttpResponse(finalJSON, content_type="application/json" )
else: ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 CHECK_PROGRESS()
#=======================================================#
def check_progress(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
		# This Endpoint just checks the progress of the submitted UUID Progress Object
		# --It's used by longer functions that need server-side processing time, to keep the user updated on the progress of their
		# --formtype generator submission. Security isn't particularly important here, because the information provided isn't sensitive,
		# --and this model/object doesn't have a foreign key to a project. It can only be accessed by a UUID (unique ID) provided by the user,
		# --and the chance of someone guessing a 32-character random string in the small amount of time it takes the server to process the
		# --function is considerably low--and even if they DID manage to hack it, the information they receive is essentially rubbish and offers
		# --no sensitive data except perhaps the name or label of some rtypes--and the associated counts for the query. I suppose that could be
		# --potentially sensitive--but the security risk is so low that I won't spend time worrying about it.
		#
		# TODO: one option to secure this is to attach a foreign key from the ProgressObject to the project in question. This Endpoint could then
		# --cross-check the session user's project and make sure they're only accessing progress objects that are part of their project. Once
		# --again--not a priority right now, but I have it in a TODO tag for future edits when time is more available
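		#
		# Illustrative sketch (the shape matches the jsonString written by the query endpoints below; the values are assumed examples):
		#   {"message": "Performing Query # 1 on term: bowl", "current_query": "FRAT-4", "current_term": "bowl",
		#    "percent_done": "45", "is_complete": "False"}
		# --clients simply re-submit the same uuid until is_complete reports "True".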
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#print >> sys.stderr, request.POST
#Returns a JSON string to an AJAX request given a provided UUID
try:
currentProcessObject = AJAXRequestData.objects.filter(uuid=request.POST['uuid'])[0]
#print >>sys.stderr, "Keeping Alive?"
currentProcessObject.keep_alive = True
currentProcessObject.save()
				#Grab the JSON payload first, then delete the progress object if the work is finished
				currentJson = currentProcessObject.jsonString
				if currentProcessObject.is_finished:
					print >> sys.stderr, "Progress object finished--deleting it"
					currentProcessObject.delete()
#print >>sys.stderr, currentProcessObject.jsonString
#return the json response
return HttpResponse(currentJson, content_type="application/json")
except Exception as e:
print >>sys.stderr, "Whoops---hmmm....."
print >>sys.stderr, e
ERROR_MESSAGE += "Something happened during the check to the Progress Object--it might not have been created yet, and we are checking too quickly..." + str(e)
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 CHECK_PROGRESS_QUERY()
#=======================================================#
def check_progress_query(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
		# This Endpoint just checks the progress of the submitted UUID Progress Object
		# --It's used by longer functions that need server-side processing time, to keep the user updated on the progress of their
		# --submitted query. Security isn't particularly important here, because the information provided isn't sensitive,
		# --and this model/object doesn't have a foreign key to a project. It can only be accessed by a UUID (unique ID) provided by the user,
		# --and the chance of someone guessing a 32-character random string in the small amount of time it takes the server to process the
		# --function is considerably low--and even if they DID manage to hack it, the information they receive is essentially rubbish and offers
		# --no sensitive data except perhaps the name or label of some rtypes--and the associated counts for the query. I suppose that could be
		# --potentially sensitive--but the security risk is so low that I won't spend time worrying about it.
		# TODO: one option to secure this is to attach a foreign key from the ProgressObject to the project in question. This Endpoint could then
		# --cross-check the session user's project and make sure they're only accessing progress objects that are part of their project. Once
		# --again--not a priority right now, but I have it in a TODO tag for future edits when time is more available
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Returns a JSON string to an AJAX request given a provided UUID
try:
currentProcessObject = AJAXRequestData.objects.filter(uuid=request.GET['uuid'])[0]
currentProcessObject.keep_alive = True
currentProcessObject.save()
				#Grab the JSON payload first, then delete the progress object if the work is finished
				currentJson = currentProcessObject.jsonString
				if currentProcessObject.is_finished:
					print >> sys.stderr, "Progress object finished--deleting it"
					currentProcessObject.delete()
#return the json response
return HttpResponse(currentJson, content_type="application/json")
except Exception as e:
print >>sys.stderr, "Whoops---hmmm....."
print >>sys.stderr, e
ERROR_MESSAGE += "Something happened during the check to the Progress Object--it might not have been created yet, and we are checking too quickly..."
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"False", "row_total":"0", "row_timer":"0"}',content_type="application/json")
		else: ERROR_MESSAGE += "Error: You do not have permission to check a query UUID progress object"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'","row_index":"0","is_complete":"True", "row_total":"0", "row_timer":"0"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_RTYPE_LIST() *RECYCLING
#=======================================================#
def get_rtype_list(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This Endpoint returns a list of all record types in a formtype template. This is used mainly by the query engine
# --to figure out which rtypes to search by when a record reference type is chosen.
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#We need to return a json list of all formtype RTYPES that match the provided formtype pk
if request.method == "POST":
			#Initialize to None so a request missing both keys falls through to the error response below
			currentFormType = None
			if 'frrt-pk' in request.POST:
				currentFormType = FormType.objects.get(pk=FormRecordReferenceType.objects.get(pk=request.POST['frrt-pk']).form_type_reference.pk)
			elif 'formtype_pk' in request.POST:
				currentFormType = FormType.objects.get(pk=request.POST['formtype_pk'])
if currentFormType:
# $$$-SECURITY-$$$: Make sure we filter by the users project as usual
#TODO: This will obviously trigger server side errors if the returned query is empty(e.g. the user tries to access a formtype that isn't attached to their project)
if currentFormType.project.pk == request.user.permissions.project.pk:
finalJSON = {}
rtypeList = []
#***RECYCLING BIN*** Make sure that the returned FRAT AND FRRTS are filtered by their deletion flags. Don't want them returned in the query
for FRAT in currentFormType.formrecordattributetype_set.all().filter(flagged_for_deletion=False):
currentRTYPE = {}
currentRTYPE['label'] = FRAT.record_type
currentRTYPE['pk'] = FRAT.pk
currentRTYPE['rtype'] = 'FRAT'
rtypeList.append(currentRTYPE)
#***RECYCLING BIN*** Make sure that the returned FRAT AND FRRTS are filtered by their deletion flags. Don't want them returned in the query
for FRRT in currentFormType.ref_to_parent_formtype.all().filter(flagged_for_deletion=False):
currentRTYPE = {}
currentRTYPE['label'] = FRRT.record_type
currentRTYPE['pk'] = FRRT.pk
if FRRT.form_type_reference: currentRTYPE['ref_formtype_pk'] = FRRT.form_type_reference.pk
else: currentRTYPE['ref_formtype_pk'] = "None"
currentRTYPE['rtype'] = 'FRRT'
rtypeList.append(currentRTYPE)
finalJSON['rtype_list'] = rtypeList
finalJSON = json.dumps(finalJSON)
return HttpResponse(finalJSON, content_type="application/json" )
ERROR_MESSAGE += "Error: You are trying to access a FRRT that doesn't belong to this project!"
ERROR_MESSAGE += "Error: no FormRecordReferenceType in POST"
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_FORM_SEARCH_LIST() *RECYCLING
#=======================================================#
def get_form_search_list(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#----------------------------------------------------------------------------------------------------------------------------
# This Endpoint does nothing but return a small list of forms that match the provided query string
# --It acts as a simple Google style search bar that autocompletes the user's typing. This is handy
# --when a project may have upwards of 5000 forms and scrolling through/loading a list of 5000 forms is a bit slow and unwieldy
#
# Speed: This function, on a low-end server, can produce an answer in less than a second
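		#
		# Illustrative sketch (assumed example names/values): the client POSTs query="bowl" plus projectID and formtypeID, and gets back
		#   {"form_list": [{"projectPK": 3, "formtypePK": 12, "formPK": 88, "label": "bowl-088",
		#                   "longLabel": "Small Finds - bowl-088", "thumbnail": "/media/thumbs/88.jpg", "url": "/.../88/"}]}
		# --at most 5 entries, which is plenty for an autocomplete dropdown.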
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
if 'query' in request.POST:
#initialize our variables we'll need
projectPK = request.POST['projectID']
formtypePK = request.POST['formtypeID']
searchString = request.POST['query']
jsonResponse = {}
form_list = []
jsonResponse['form_list'] = form_list
#Only search if the searchString isn't empty
if len(searchString) != 0:
#Initialize our query to contain all forms of this formtype and project
queriedForms = Form.objects.all().filter(form_type__pk=formtypePK)
						# $$$-SECURITY-$$$: Make sure we filter by the user's project as usual
						#--(QuerySet.filter returns a new queryset, so the result must be reassigned or the filter is silently lost)
						queriedForms = queriedForms.filter(project__pk=request.user.permissions.project.pk)
						#***RECYCLING BIN*** Make sure that we filter out any forms flagged for deletion
						queriedForms = queriedForms.filter(flagged_for_deletion=False)
allTerms = searchString.split(' ')
						#Use a startswith filter if the search string is fewer than 3 characters, otherwise
						#--fall back to a normal icontains.
if len(allTerms) == 1:
if len(searchString) < 3:
newQuery = queriedForms.filter(form_name__istartswith=searchString)
#Now let's make this just a tad bit more robust--if it finds zero matches with istartswith--then default back to icontains until it finds a match
								if not newQuery.exists():
queriedForms = queriedForms.filter(form_name__icontains=searchString)
else:
queriedForms = newQuery
else:
queriedForms = queriedForms.filter(form_name__icontains=searchString)
elif len(allTerms) > 1:
for term in allTerms:
queriedForms = queriedForms.filter(form_name__icontains=term)
						#Cap the result list at 5 forms
queriedForms = queriedForms[:5]
#create our python dict to send as JSON
for form in queriedForms:
currentForm = {}
currentForm['projectPK'] = form.project.pk
currentForm['formtypePK'] = form.form_type.pk
currentForm['formPK'] = form.pk
currentForm['label'] = form.form_name
currentForm['longLabel'] = form.form_type.form_type_name + " - " + form.form_name
currentForm['thumbnail'] = form.get_ref_thumbnail()
currentForm['url'] = reverse('maqlu_admin:edit_form',kwargs={'project_pk': request.user.permissions.project.pk, 'form_type_pk': form.form_type.pk, 'form_pk':form.pk})
form_list.append(currentForm)
#return the finished JSON
jsonResponse = json.dumps(jsonResponse)
return HttpResponse(jsonResponse, content_type="application/json")
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_PREVIOUS_NEXT_FORMS() *RECYCLING
#=======================================================#
def get_previous_next_forms(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#This API EndPoint takes a formtype PK and a form PK and returns the previous, current, and next forms in a sorted list
#--This gives back and forward functionality when navigating forms.
#--It first filters out only the forms related to the formtype, and then sorts them by the indexed value
#--'sort_index' -- sort_index is a Form attribute that is a unique indexed value "<form_name>---<form_pk>"
#--We then submit the parsed out name and pk numbers for the previous and next forms for the form requested
	#--This also forces the user's project as a filter--just in case they manage to pass a form_type that doesn't belong to their project
#----------------------------------------------------------------------------------------------------------------------------
# Speed: This function, on a low-end server, can produce an answer in ~1.5 secs for a sort of ~100,000 rows
# --Anything less than that easily hits under a second--which is nice and fast
# --I assume on a deployment server with better cpus/RAM this will be even faster
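	#
	# Illustrative sketch of the wrap-around index math used below (assumed example values):
	#   allVals = ["A---1", "B---2", "C---3"]; looking at "A---1" (index 0) gives previous "C---3" and next "B---2",
	#   while "C---3" (index 2) wraps its next pointer back around to "A---1".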
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
#POST values submitted are : formtype_pk & form_pk & project_pk
#Check if they exist, and only continue if they do
if 'formtype_pk' in request.POST and 'form_pk' in request.POST and 'project_pk' in request.POST:
thisQuery = Form.objects.filter(form_type__pk=request.POST['formtype_pk'])
					# $$$-SECURITY-$$$: Make sure we filter by the user's project as usual
					#--(QuerySet.filter returns a new queryset, so the result must be reassigned or the filter is silently lost)
					thisQuery = thisQuery.filter(project__pk=request.user.permissions.project.pk)
					#***RECYCLING BIN*** Make sure that the returned Forms aren't flagged for deletion
					thisQuery = thisQuery.filter(flagged_for_deletion=False)
#Sort the query now
thisQuery = thisQuery.order_by('sort_index')
allVals = thisQuery.values_list('sort_index', flat=True)
formPKToLookFor = request.POST['form_pk']
for index, value in enumerate(allVals):
						#Our delimiter is "---" for 'sort_index'; rsplit guards against form names that themselves contain "---"
						label, pkVal = value.rsplit('---', 1)
#Only activate if we find the matching form PK in the list
if formPKToLookFor == pkVal:
#Once we find our match, we simply get the values for the previous and next forms in our list by adding or subtracting from the index
#--Now, what if we are at the first or last form in the list? This will obviously trip an Index Error in Python so let's fix that.
#--We'll add functionality to cycle to the last index if at the beginning, or the first index if at the end
lastIndex = len(allVals)-1
							#First test for our previousForm values
							if (index-1) < 0: previousForm = allVals[lastIndex].rsplit('---', 1)
							else: previousForm = allVals[index-1].rsplit('---', 1)
							#Then test for our nextForm values
							if (index+1) > lastIndex: nextForm = allVals[0].rsplit('---', 1)
							else: nextForm = allVals[index+1].rsplit('---', 1)
#Now create the json string to submit
jsonResponse = '{"previous_label":"'+previousForm[0]+'","previous_pk":"'+previousForm[1]+'","next_label":"'+nextForm[0]+'","next_pk":"'+nextForm[1]+'","current_label":"'+label+'","current_pk":"'+pkVal+'","formtype_pk":"'+request.POST['formtype_pk']+'","project_pk":"'+request.POST['project_pk']+'"}'
return HttpResponse(jsonResponse, content_type="application/json")
#return an indicator to trigger empty "#" links if there is missing data in the POST data
return HttpResponse('{"ERROR":"There were missing POST values in this request--either javascript is deactivated, or maybe someone is trying to do a little client-side hacking Hmm?"}',content_type="application/json")
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 GET_USER_LIST()
#=======================================================#
def get_user_list(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
# :::This function just returns a list of users with their information for the project's userform
	# --Obviously it should only give access to those with admin-level permissions. This will not return a password, nor allow edits
# --But for privacy reasons, let's keep it limited to level 5 access.
# --The main project control panel will show limited user information to those without access, so let's keep it that way
#------------------------------------------------------------------------------------------------------------------------------------
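	#
	# Illustrative sketch (assumed example values): the returned JSON is shaped like
	#   {"userlist": [{"user_id": 2, "username": "jdoe", "access_level": 3, "name": "Jane Doe",
	#                  "title": "Field Director", "email": "jdoe@example.com"}]}
	# --passwords are never serialized here.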
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
returnedJSON = {}
userList = []
returnedJSON['userlist'] = userList
# $$$-SECURITY-$$$: Make sure we filter by the users project as usual
projectUsers = User.objects.all().filter(permissions__project__pk=request.user.permissions.project.pk)
for aUser in projectUsers:
currentUser = {}
currentUser['user_id'] = aUser.pk
currentUser['username'] = aUser.username
currentUser['access_level'] = aUser.permissions.access_level
currentUser['name'] = aUser.first_name + " " + aUser.last_name
currentUser['title'] = aUser.permissions.job_title
currentUser['email'] = aUser.email
userList.append(currentUser)
				returnedJSON = json.dumps(returnedJSON)
return HttpResponse(returnedJSON,content_type="application/json")
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 5 USERNAME_TAKEN()
#=======================================================#
def username_taken(self, request):
#***************#
ACCESS_LEVEL = 5
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
#:::This function just returns a 'true' or 'false' json response if the submitted 'username' string is already taken
# --This still requires access level 5 because only the admin who can create and manage users should be using it
# --It's not crazy important if someone receives a true or false response--this doesn't change the database, but for
# --confidentiality, someone can't just 'guess' someone's username by typing this in over and over again
# --a public version would need to lock sessions/users attempting it too many times
#------------------------------------------------------------------------------------------------------------------------------------
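	#
	# Illustrative sketch: POSTing username="jdoe" answers {"user_exists":"T"} if that username is taken, {"user_exists":"F"} otherwise.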
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#Make sure we only take POST requests
if request.method == 'POST':
if User.objects.all().filter(username=request.POST['username']).exists():
return HttpResponse('{"user_exists":"T"}', content_type="application/json")
else:
return HttpResponse('{"user_exists":"F"}', content_type="application/json")
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 NAVIGATE_MASTER_QUERY_PAGINATION() *RECYCLING
#=======================================================#
def navigate_master_query_pagination(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
	# This is the real magic of the database in terms of non-geospatial data. This pagination engine takes a pre-computed list of form pks from json POST data and renders one page of fully populated rows.
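	#
	# Illustrative sketch of the pagination slice math used below (assumed example values):
	#   requestedPageNumber=2, resultsPerPage=25  =>  startIndex = 25*2-25 = 25, endIndex = 25*2 = 50,
	#   so masterQuery[25:50] yields exactly the second page of form pks.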
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
print >>sys.stderr, "do something"
#We need to make sure we have permission to deal with the formtype--e.g. it's part of the user's current project
formtype = FormType.objects.get(pk=request.POST['formtype_id'])
#If the project IDs match, then we're good to go! Also if it's not the project, but the formtype is set to PUBLIC then we are also good to go
if formtype.project.pk == request.user.permissions.project.pk or (formtype.project.pk != request.user.permissions.project.pk and formtype.is_public == True):
#First let's setup our header field of ordered labels
print >>sys.stderr, "Timer Start"
form_att_type_list = []
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for attType in formtype.formrecordattributetype_set.all().filter(flagged_for_deletion=False).order_by('order_number')[:5]:
form_att_type_list.append((attType.order_number,'frat',attType.pk,attType.record_type))
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for refType in formtype.ref_to_parent_formtype.all().filter(flagged_for_deletion=False).order_by('order_number')[:5]:
form_att_type_list.append((refType.order_number,'frrt',refType.pk,refType.record_type))
				#sort the combined reference and attribute type list
form_att_type_list = sorted(form_att_type_list, key=lambda att: att[0])
#we only want the first 5 types
form_att_type_list = form_att_type_list[0:5]
formList = []
#Setup a list to hold the attribute types from the query. We want to show the record types that are part of the search terms,
# --rather than the default types that are in order. If there are less than 5 query record types, use the ordered record type list
# --until 5 are met.
queryRTYPElist = []
uniqueRTYPES = []
rtypeCounter = 1
#We need to check the # of rtypes in our header list now--if it's less than 5, then let's add from the ordered list
#We also need to make sure we aren't adding duplicates of the RTYPES, e.g. if we're looking for a match under "Object Number" and Object Number is already
#--in our sorted order-num list--let's not re-add it.
for attType in form_att_type_list:
print >>sys.stderr, "AttTypeList: " + str(attType)
matchfound = False;
for queryAttType in queryRTYPElist:
if attType[2] == queryAttType[2]:
matchfound = True
if matchfound == False and len(queryRTYPElist) < 5:
#let's arbitrarily add '100' to the order number so that our queries are definitely in front of these
queryRTYPElist.append((attType[0] + 100,attType[1],attType[2],attType[3]))
for q in queryRTYPElist:
print >>sys.stderr, "QTypeList: " + str(q)
print >>sys.stderr, request.POST
#serializeTest = serializers.serialize("json", masterQuery)
queryCounter = 0
logging.info("TEST A")
logging.info("TEST A END")
print >> sys.stderr, request.POST['form_list']
masterQuery = request.POST['form_list'].split(',')
#Figure out if we requested ALL results or just a single page
if request.POST['requestedPageNumber'] != 'ALL':
#Setup our Pagination values given in the POST string
requestedPageNumber = int(request.POST['requestedPageNumber'])
resultsPerPage = int(request.POST['resultsPerPage'])
#Get our queryset slice values
startIndex = (resultsPerPage * requestedPageNumber) - resultsPerPage
endIndex = resultsPerPage * requestedPageNumber
else:
#We are asking for ALL results of this query--could take longer to load
requestedPageNumber = "ALL"
						resultsPerPage = int(request.POST['numberOfResults'])
startIndex = 0
						#cast to int--these values are used as slice indices below
						endIndex = int(request.POST['numberOfResults'])
					print >>sys.stderr, startIndex
					print >>sys.stderr, endIndex
masterQuery = masterQuery[startIndex:endIndex]
print >>sys.stderr, "TIMER RR"+ " : " + str(time.clock())
					#Loop over just this page of form pks--each iteration fetches one form by pk
for form_pk in masterQuery:
aForm = Form.objects.get(pk=form_pk)
print >>sys.stderr, "TIMER S"+ " : " + str(time.clock())
rowList = []
#Let's loop through each item in the queryRTYPE list and match up the frav's in each queried form so the headers match the form attribute values
for rtype in queryRTYPElist:
if rtype[1] == 'frat':
#print >>sys.stderr, str(rtype[2]) + ' ' + str(aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2]).count())
print >>sys.stderr, "TIMER X"+ " : " + str(time.clock())
formRVAL = aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2])
#We need to check for NULL FRAV's here. When a user manually creates new forms, they don't always have FRAVS created for them if they leave it blank
if formRVAL.exists():
rowList.append((rtype[0],'frav',formRVAL[0].record_value, formRVAL[0].pk))
else:
print >>sys.stderr, "Whoops--something happened. There are no RVALS for 'frats' using: " + str(rtype[2])
print >>sys.stderr, "TIMER Y"+ " : " + str(time.clock())
else:
#for frrt in aForm.ref_to_parent_form.all():
#print >>sys.stderr, "" + str(frrt.pk)
formRVAL = aForm.ref_to_parent_form.all().filter(record_reference_type__pk=rtype[2])
if formRVAL.exists():
formRVAL = formRVAL[0]
#First check to see if there are any relations stored in the many to many relationship
# --if there are, then load them normally, and if not change the value to a frrv-ext tag and store the external ID for the
# --ajax request to process properly
allReferences = formRVAL.record_reference.all()
refCount = allReferences.count()
if refCount > 0:
if allReferences:
													#we need to store a list of its references--it's a manytomany relationship
													#A comma should be sufficient to separate them, but to be safe--we'll make our delimiter a ^,^
													#-- we also need to provide the formtype pk value for the link
listOfRefs = ""
for rec in allReferences:
listOfRefs += str(rec) + '|^|' + str(rec.form_type.pk) + '|^|' + str(rec.pk) + "^,^"
													#remove the last delimiter
listOfRefs = listOfRefs[0:-3]
rowList.append((rtype[0],'frrv',listOfRefs, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-ext for the AJAX callable
rowList.append((rtype[0],'frrv-ext',formRVAL.external_key_reference, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-null for the AJAX callable
rowList.append((rtype[0],'frrv-null',"", ""))
print >>sys.stderr, "TIMER Z"+ " : " + str(time.clock())
						#sort the combined reference and attribute value row by order number
rowList = sorted(rowList, key=lambda att: att[0])
# print >> sys.stderr, str(rowList)
#Now let's handle the thumbnail bit of business for the query
#--If the current form IS a media type already, then use itself to grab the thumbnail URI
if aForm.form_type.type == 1:
thumbnailURI = aForm.get_thumbnail_type()
else:
#let's find the first media type in the order but offer a default to "NO PREVIEW" if not found
thumbnailURI = staticfiles_storage.url("/static/site-images/no-thumb-missing.png")
for record in rowList:
#if it's a reference
if record[1] == 'frrv' or record[1] == 'frrv-ext':
currentRTYPE = FormRecordReferenceValue.objects.get(pk=int(record[3]))
#if it's not a NoneType reference:
								if currentRTYPE.record_reference_type.form_type_reference is not None:
#If its a reference to a media type
if currentRTYPE.record_reference_type.form_type_reference.type == 1:
print >> sys.stderr, "WE GOT A MATCH"
#Because a form record reference value is a ManyToMany relationship, we just grab the first one in the list
#TODO this may need to be edited later--because you can't order the selections. I may add another ForeignKey called
#"Thumbnail Reference" which links to a single relation to a form of a media type--this would also
#probably solve the complexity of looping through to grab it as it stands right now
#****WE also have to check for NULL references
if currentRTYPE.record_reference.all().count() > 0:
thumbnailURI = currentRTYPE.record_reference.all()[0].get_thumbnail_type()
break
#we only want the first 5 values from the final ordered list of attributes
rowList = rowList[0:5]
formList.append([thumbnailURI,str(aForm.pk), aForm, rowList])
print >>sys.stderr, "TIMER ZZ"+ " : " + str(time.clock())
finishedJSONquery = {}
headerList=[]
for rtype in queryRTYPElist:
rtypeDict = {}
rtypeDict["index"] = rtype[0]
rtypeDict["rtype"] = rtype[1]
rtypeDict["pk"] = rtype[2]
rtypeDict["name"] = rtype[3]
headerList.append(rtypeDict)
finishedJSONquery["rtype_header"] = headerList
allFormList = []
counter = 0
total = len(formList)
for form in formList:
formDict = {}
formDict["thumbnail_URI"] = form[0]
formDict["pk"] = form[1]
if formtype.is_hierarchical: formDict["form_id"] = form[2].get_hierarchy_label()
else: formDict["form_id"] = form[2].form_name
formRVALS = []
for rval in form[3]:
rvalDict = {}
rvalDict["index"] = rval[0]
rvalDict["rtype"] = rval[1]
rvalDict["value"] = rval[2]
rvalDict["pk"] = rval[3]
formRVALS.append(rvalDict)
formDict["rvals"] = formRVALS
allFormList.append(formDict)
finishedJSONquery["form_list"] = allFormList
finishedJSONquery["formtype"] = formtype.form_type_name
finishedJSONquery["formtype_pk"] = formtype.pk
finishedJSONquery["project_pk"] = request.POST['project_id']
finishedJSONquery["pagination_page"] = requestedPageNumber
finishedJSONquery["resultsCount"] = request.POST['numberOfResults']
finishedJSONquery["pagination_form_list"] = request.POST['form_list']
					#convert the finished structure to JSON
finishedJSONquery = json.dumps(finishedJSONquery)
print >>sys.stderr, "Timer End"
return HttpResponse(finishedJSONquery, content_type="application/json")
ERROR_MESSAGE += "Error: Trying to access missing or forbidden data"
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 NAVIGATE_QUERY_PAGINATION() *RECYCLING
#=======================================================#
def navigate_query_pagination(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
	# This is the real magic of the database in terms of non-geospatial data. This Query engine takes complicated query input from json POST data, builds the queryset term by term, and reports progress back through an AJAXRequestData object.
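	#
	# Illustrative sketch (assumed example values) of the currentQueryJSON structure parsed below:
	#   {"query_list": {"0": {"RTYPE": "FRAT-4", "RTYPE-DEEP": "FORMID-0", "LABEL": "Object ID",
	#                         "TERMS": [{"TVAL": "bowl", "QCODE": "1", "T-ANDOR": "and"}]}}}
	# --RTYPE encodes "<kind>-<pk>", QCODE selects the lookup (0 contains, 1 icontains, 2 exact, 3 excludes, 4 is-null),
	# --and T-ANDOR chains each term onto the running queryset with AND or OR.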
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
#We need to make sure we have permission to deal with the formtype--e.g. it's part of the user's current project
formtype = FormType.objects.get(pk=request.POST['formtype_id'])
#If the project IDs match, then we're good to go! Also if it's not the project, but the formtype is set to PUBLIC then we are also good to go
if formtype.project.pk == request.user.permissions.project.pk or (formtype.project.pk != request.user.permissions.project.pk and formtype.is_public == True):
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"message":"Loading Json","current_query":"","current_term":"","percent_done":"0","is_complete":"False"}')
progressData.save()
#First let's setup our header field of ordered labels
print >>sys.stderr, "Timer Start"
form_att_type_list = []
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for attType in formtype.formrecordattributetype_set.all().filter(flagged_for_deletion=False).order_by('order_number')[:5]:
form_att_type_list.append((attType.order_number,'frat',attType.pk,attType.record_type))
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for refType in formtype.ref_to_parent_formtype.all().filter(flagged_for_deletion=False).order_by('order_number')[:5]:
form_att_type_list.append((refType.order_number,'frrt',refType.pk,refType.record_type))
				#sort the combined reference and attribute type list
form_att_type_list = sorted(form_att_type_list, key=lambda att: att[0])
#we only want the first 5 types
form_att_type_list = form_att_type_list[0:5]
#Finally let's organize all of our reference and attribute values to match their provided order number
formList = []
				#Setup our initial queryset that includes all forms
#***RECYCLING BIN*** Make sure our Forms are filtered by their deletion flags
masterQuery = formtype.form_set.all().filter(flagged_for_deletion=False)
#Setup a list to hold the attribute types from the query. We want to show the record types that are part of the search terms,
# --rather than the default types that are in order. If there are less than 5 query record types, use the ordered record type list
# --until 5 are met.
queryRTYPElist = []
uniqueRTYPES = []
rtypeCounter = 1
#Load the JSON query from POST
masterQueryJSON = json.loads(request.POST['currentQueryJSON'])
				#Update our progress bar to show we're at 5%
progressData.jsonString = '{"message":"Performing Query","current_query":"","current_term":"","percent_done":"5","is_complete":"False"}'
progressData.save()
#Loop through each separate query
for query in sorted(masterQueryJSON['query_list']):
print >>sys.stderr, query
					#setup a dictionary of key values for this query's stats to add to the main query stats dictionary later
singleQueryStats = {}
#***RECYCLING BIN*** Make sure our Forms are filtered by their deletion flags
queriedForms = formtype.form_set.all().filter(flagged_for_deletion=False)
currentJSONQuery = masterQueryJSON['query_list'][query]
uniqueQuery = False
#Let's not allow any duplicate rtypes in the query rtype list header e.g. we don't want "Object ID" to show up 4 times
#--if the user makes a query that compares it 4 times in 4 separate queries
if currentJSONQuery['RTYPE'] not in uniqueRTYPES:
uniqueRTYPES.append(currentJSONQuery['RTYPE'])
uniqueQuery = True
					#We need to check whether this query is an AND/OR, or the first one (in which case there is no AND/OR)
rtype, rtypePK = currentJSONQuery['RTYPE'].split("-")
#store our percentDone variable to update the ajax progress message object
percentDone = 0
					##########################################################################################
					# (FRAT) FormRecordAttributeType Lookups
					##########################################################################################
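					#
					# A minimal sketch of the QCODE-to-ORM mapping used in this branch (it mirrors the filters below):
					#   '0' -> record_value__contains          (CONTAINS)
					#   '1' -> record_value__icontains         (ICONTAINS)
					#   '2' -> record_value__exact             (MATCHES EXACT)
					#   '3' -> exclude(record_value__contains) (EXCLUDES)
					#   '4' -> record_value__isnull=True       (IS_NULL)
					# --every lookup is also pinned to this FRAT's pk via formrecordattributevalue__record_attribute_type__pk.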
if rtype == 'FRAT':
#thisRTYPE = FormRecordAttributeType.objects.get(pk=rtypePK)
#store the record type in a new rtype list if unique
if uniqueQuery: queryRTYPElist.append((rtypeCounter,'frat',rtypePK,currentJSONQuery['LABEL']))
rtypeCounter += 1
						tCounter = 0
logging.info("TimerA"+ " : " + str(time.clock()))
for term in currentJSONQuery['TERMS']:
#Now begin modifying the SQL query which each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK)#IS_NULL
#save stats and query
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#CONTAINS
elif term['QCODE'] == '1': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (formtype.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK))#IS_NULL
#save stats and query
queriedForms = (newQuery | queriedForms)
logging.info("TimerB"+ " : " + str(time.clock()))
							#We'll calculate percent by treating the whole query phase as 50% of the work, starting from the 5% mark.
logging.info(rtypeCounter)
logging.info(len(masterQueryJSON['query_list']))
Qpercent = ((rtypeCounter-2) * (50.0/len(masterQueryJSON['query_list'])))
percentDone = 5 + Qpercent + (tCounter * (Qpercent / len(currentJSONQuery['TERMS'])) )
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on term: '+term['TVAL']+'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+term['TVAL']+'","percent_done":"'+ str(int(percentDone)) +'","is_complete":"False"}'
progressData.save()
tCounter += 1
logging.info("TimerC"+ " : " + str(time.clock()))
					##########################################################################################
					# (FRRT) FormRecordReferenceType Lookups
					# This is where things can get complicated. I've added a 'deep' search--the ability to search fields from a related model.
					# --Right now, this just looks at the form IDs of the related field and looks for matches--it will still need to do that, but
					# --it also needs to be able to look up FRATs or FRRTs in the same field--that essentially doubles the code for this block,
					# --and will also significantly increase query time, because we are doing another JOIN in the
					# --SQL lookup to span the relationship. This won't affect the list of queried forms directly--they will be limited by what the
					# --query finds, obviously--but the user will only see the column for the related FRRT that had a match--not specifically the field that matched.
					# ----It WILL affect the counts for the graphs etc.
					##########################################################################################
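					#
					# A minimal sketch of the 'deep' lookup strategy used below: first flatten the related formtype's
					# matching form pks, then constrain the running queryset through the many-to-many reference:
					#   flattenedSet = list(deepFormType.form_set.all().filter(...).values_list('pk', flat=True))
					#   queriedForms = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
					# --this trades one wide JOIN for a materialized pk list.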
elif rtype == 'FRRT':
#thisRTYPE = FormRecordReferenceType.objects.get(pk=rtypePK)
#store the record type in a new rtype list if unique
if uniqueQuery: queryRTYPElist.append((rtypeCounter,'frrt',rtypePK,currentJSONQuery['LABEL']))
rtypeCounter += 1
						tCounter = 0
logging.info("TimerD"+ " : " + str(time.clock()))
#get the deep values
deepRTYPE, deepPK = currentJSONQuery['RTYPE-DEEP'].split('-')
for term in currentJSONQuery['TERMS']:
#==========================================================================================================================================================================================
# IF WE ARE JUST LOOKING UP THE RTYPE FORM ID
#==========================================================================================================================================================================================
#TODO: This also needs to check external reference values if no match is found
if deepRTYPE == 'FORMID':
#Now begin modifying the SQL query which each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK) #IS_NULL
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#CONTAINS
elif term['QCODE'] == '1': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (formtype.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK))#IS_NULL
queriedForms = (newQuery | queriedForms)
#==========================================================================================================================================================================================
# IF WE ARE LOOKING UP THE RELATIONS FRAT
#==========================================================================================================================================================================================
elif deepRTYPE == 'FRAT':
print >>sys.stderr, "We should be here"
#grab the formtype in question
deepFormType = FormType.objects.get(pk=FormRecordAttributeType.objects.get(pk=deepPK).form_type.pk)
#Now begin modifying the SQL query which each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
#----------------------------------------------------------
# AND STATEMENT FOR A --TERM--
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
#First we Get a flattened list of form pk values from the deepFormType
#Then we filter our current formtype queryset's frrt manytomany pks by the pk value list just created
												if term['QCODE'] == '0':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #CONTAINS
													newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '1':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #ICONTAINS
													newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '2':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #MATCHES EXACT
													newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '3':
													flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #EXCLUDES
													newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '4':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #IS_NULL
													newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
queriedForms = newQuery
#--------------------------------------------------------
# OR STATEMENT FOR a --TERM--
else:
#Now let's figure out the QCODE, e.g. contains, match exact etc.
												if term['QCODE'] == '0':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #CONTAINS
													newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '1':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #ICONTAINS
													newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '2':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #MATCHES EXACT
													newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '3':
													flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #EXCLUDES
													newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
												elif term['QCODE'] == '4':
													flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #IS_NULL
													newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
queriedForms = (newQuery | queriedForms)
#==========================================================================================================================================================================================
# IF WE ARE LOOKING UP THE RELATION'S FRRT(Only form ID allowed)
#==========================================================================================================================================================================================
elif deepRTYPE == 'FRRT':
print >>sys.stderr, "We should be here 3"
#grab the formtype in question
deepFormType = FormType.objects.get(pk=FormRecordReferenceType.objects.get(pk=deepPK).form_type_parent.pk)
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
#----------------------------------------------------------
# AND STATEMENT FOR A --TERM--
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
#First we Get a flattened list of form pk values from the deepFormType
#Then we filter our current formtype queryset's frrt manytomany pks by the pk value list just created
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #CONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL']).values_list('pk', flat=True)) #ICONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL']).values_list('pk', flat=True)) #EXACT MATCH
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #EXCLUDES
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS NULL
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
queriedForms = newQuery
#--------------------------------------------------------
# OR STATEMENT FOR a --TERM--
else:
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #CONTAINS
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL']).values_list('pk', flat=True)) #ICONTAINS
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL']).values_list('pk', flat=True)) #EXACT MATCH
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #EXCLUDES
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS NULL
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
queriedForms = (newQuery | queriedForms)
#We'll estimate progress here: this query section starts at roughly 5% and approaches 50% once every query and term has been processed.
Qpercent = ((rtypeCounter-2) * (50.0/len(masterQueryJSON['query_list'])))
percentDone = 5 + Qpercent + (tCounter * (Qpercent / len(currentJSONQuery['TERMS'])) )
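#Worked example with hypothetical numbers: 2 queries of 4 terms each, on query #2 (rtypeCounter=3) and its first term (tCounter=1):
#--Qpercent = 1 * (50.0/2) = 25.0, so percentDone = 5 + 25.0 + (1 * (25.0/4)) = 36.25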
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on term: '+term['TVAL']+'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+term['TVAL']+'","percent_done":"'+ str(percentDone) +'","is_complete":"False"}'
progressData.save()
tCounter += 1
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
# (Form ID) Lookups
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
elif rtype == "FORMID":
tCounter = 0
logging.info("TimerD"+ " : " + str(time.clock()))
for term in currentJSONQuery['TERMS']:
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
print >>sys.stderr, str(formtype.form_set.all().filter(form_name__contains=term['TVAL']))
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
print >> sys.stderr, "Is it working?"
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(form_name__contains=term['TVAL']) #CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(form_name__icontains=term['TVAL']) #ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(form_name__exact=term['TVAL'])#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(form_name__contains=term['TVAL'])#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(form_name__isnull=True) #IS_NULL
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (formtype.form_set.all().filter(form_name__contains=term['TVAL']))#CONTAINS
elif term['QCODE'] == '1': newQuery = (formtype.form_set.all().filter(form_name__icontains=term['TVAL']))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (formtype.form_set.all().filter(form_name__exact=term['TVAL']))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (formtype.form_set.all().exclude(form_name__contains=term['TVAL']))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (formtype.form_set.all().filter(form_name__isnull=True))#IS_NULL
queriedForms = (newQuery | queriedForms)
#We'll estimate progress here: this query section starts at roughly 5% and approaches 50% once every query and term has been processed.
Qpercent = ((rtypeCounter-2) * (50.0/len(masterQueryJSON['query_list'])))
percentDone = 5 + Qpercent + (tCounter * (Qpercent / len(currentJSONQuery['TERMS'])) )
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on term: '+term['TVAL']+'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+term['TVAL']+'","percent_done":"'+ str(percentDone) +'","is_complete":"False"}'
progressData.save()
tCounter += 1
#If this is an AND query--attach it to the masterQuery as so.
if currentJSONQuery['Q-ANDOR'] == 'and':
masterQuery = (masterQuery & queriedForms)
#If it's an OR query, attach it to the masterQuery as an OR statement
elif currentJSONQuery['Q-ANDOR'] == 'or':
masterQuery = (masterQuery | queriedForms)
#Otherwise it's the first, or a single query, and should simply replace the masterQuery
#also set the count to this first query so we have one in case there is only one query
else:
masterQuery = queriedForms
#Now make sure our final queried list has distinct values--merging querysets has a tendency to create duplicates
masterQuery = masterQuery.distinct()
#***RECYCLING BIN*** Make sure our final query gets filtered of recycled forms (they can potentially be re-added in the above query engine)
masterQuery = masterQuery.filter(flagged_for_deletion=False)
#Send a message to our AJAX request object
progressData.jsonString = '{"message":"Running raw SQL","current_query":"","current_term":"''","percent_done":"50","is_complete":"False"}'
progressData.save()
masterQueryCount = masterQuery.count()
#Send a message to our AJAX request object
progressData.jsonString = '{"message":"Loading Queried Forms & Sending generated stats now...","current_query":"","current_term":"''","percent_done":"60","is_complete":"False","stats":"none"}'
progressData.save()
#We need to check the # of rtypes in our header list now--if it's less than 5, then let's add from the ordered list
#We also need to make sure we aren't adding duplicates of the RTYPES, e.g. if we're looking for a match under "Object Number" and Object Number is already
#--in our sorted order-num list--let's not re-add it.
for attType in form_att_type_list:
print >>sys.stderr, "AttTypeList: " + str(attType)
matchfound = False
for queryAttType in queryRTYPElist:
if attType[2] == queryAttType[2]:
matchfound = True
if matchfound == False and len(queryRTYPElist) < 5:
#let's arbitrarily add '100' to the order number so that our queries are definitely in front of these
queryRTYPElist.append((attType[0] + 100,attType[1],attType[2],attType[3]))
for q in queryRTYPElist:
print >>sys.stderr, "QTypeList: " + str(q)
#serializeTest = serializers.serialize("json", masterQuery)
queryCounter = 0
logging.info("TEST A")
logging.info("TEST A END")
#Figure out if we requested ALL results or just a single page
if request.POST['requestedPageNumber'] != 'ALL':
#Setup our Pagination values given in the POST string
requestedPageNumber = int(request.POST['requestedPageNumber'])
resultsPerPage = int(request.POST['resultsPerPage'])
#Get our queryset slice values
startIndex = (resultsPerPage * requestedPageNumber) - resultsPerPage
endIndex = resultsPerPage * requestedPageNumber
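#Worked example with hypothetical numbers: requestedPageNumber=2 and resultsPerPage=25
#--gives startIndex = (25*2)-25 = 25 and endIndex = 25*2 = 50, i.e. the queryset slice [25:50]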
else:
#We are asking for ALL results of this query--could take longer to load
requestedPageNumber = "ALL"
resultsPerPage = int(request.POST['numberOfResults'])
startIndex = 0
endIndex = int(request.POST['numberOfResults'])
print >>sys.stderr, startIndex
print >>sys.stderr, endIndex
#-----------------------------------------------------------------------------------------------------------
# Here we need to determine whether or not the form type being queried is hierarchical.
# --If it is hierarchical, then we just organize the masterQuery and sort it with the hierarchy in mind
# --as well as with its hierarchical labels--otherwise just perform a normal sort by its label
if formtype.is_hierarchical:
global hierarchyFormList
hierarchyFormList = []
#Finally let's organize all of our reference and attribute values to match their provided order number
#We want to find all the forms that have no parent element first--these are the top of the nodes
#Then we'll organize the forms by hierarchy--which can then be put through the normal ordered query
print >>sys.stderr, "TIMER R"+ " : " + str(time.clock())
masterQuery = masterQuery.filter(hierarchy_parent=None).exclude(form_number=None, form_name=None)[startIndex:endIndex]
print >>sys.stderr, "TIMER RR"+ " : " + str(time.clock())
if masterQuery:
total = masterQuery.count()
for aForm in masterQuery:
queryCounter += 1
Qpercent = ( queryCounter * (30/(total*1.0)))
finalPercent = (60 + int(Qpercent))
progressData.jsonString = '{"SQL":"True","message":"Loading Queried Forms!","current_query":"'+ str(queryCounter) +'","current_term":"'+ str(total) +'","percent_done":"' + str(finalPercent) + '","is_complete":"False","stats":"none"}'
progressData.save()
logging.info(aForm.form_name)
hierarchyFormList.append(aForm)
#Make a recursive function to search through all children
def find_children(currentParentForm):
global hierarchyFormList
for currentChild in currentParentForm.form_set.all():
hierarchyFormList.append(currentChild)
find_children(currentChild)
find_children(aForm)
#reset our masterQuery to our new hierarchical list!
masterQuery = hierarchyFormList
else:
print >>sys.stderr, "TIMER R"+ " : " + str(time.clock())
#sort the formlist by their sort_index
masterQuery = masterQuery.order_by('sort_index')[startIndex:endIndex]
print >>sys.stderr, "TIMER RR"+ " : " + str(time.clock())
#count the query so we only make one database hit before looping(otherwise each loop would be another hit)
if masterQuery:
total = masterQuery.count()
print >>sys.stderr, "TIMER RRR"+ " : " + str(time.clock())
for aForm in masterQuery:
print >>sys.stderr, "TIMER S"+ " : " + str(time.clock())
queryCounter += 1
Qpercent = ( queryCounter * (30/(total*1.0)))
finalPercent = (60 + int(Qpercent))
progressData.jsonString = '{"SQL":"True","message":"Loading Queried Forms!","current_query":"'+ str(queryCounter) +'","current_term":"'+ str(total) +'","percent_done":"' + str(finalPercent) + '","is_complete":"False","stats":"none"}'
print >>sys.stderr, "TIMER RRRR"+ " : " + str(time.clock())
progressData.save()
print >>sys.stderr, "TIMER RRRRR"+ " : " + str(time.clock())
# print >>sys.stderr, str(aForm.pk) + ": <!-- Current Form Pk"
rowList = []
#Let's loop through each item in the queryRTYPE list and match up the frav's in each queried form so the headers match the form attribute values
for rtype in queryRTYPElist:
if rtype[1] == 'frat':
#print >>sys.stderr, str(rtype[2]) + ' ' + str(aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2]).count())
print >>sys.stderr, "TIMER X"+ " : " + str(time.clock())
formRVAL = aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2])
#We need to check for NULL FRAV's here. When a user manually creates new forms, they don't always have FRAVS created for them if they leave it blank
if formRVAL.exists():
rowList.append((rtype[0],'frav',formRVAL[0].record_value, formRVAL[0].pk))
else:
print >>sys.stderr, "Whoops--something happened. There are no RVALS for 'frats' using: " + str(rtype[2])
#If there isn't an RVAL for this RTYPE then make a new one and return it instead
newFRAV = FormRecordAttributeValue()
newFRAV.record_attribute_type = FormRecordAttributeType.objects.get(pk=rtype[2])
newFRAV.form_parent = aForm
newFRAV.project = aForm.project
newFRAV.record_value = ""
newFRAV.save()
rowList.append((rtype[0],'frav',newFRAV.record_value, newFRAV.pk))
print >>sys.stderr, "TIMER Y"+ " : " + str(time.clock())
else:
#for frrt in aForm.ref_to_parent_form.all():
#print >>sys.stderr, "" + str(frrt.pk)
formRVAL = aForm.ref_to_parent_form.all().filter(record_reference_type__pk=rtype[2])
if formRVAL.exists():
formRVAL = formRVAL[0]
#First check to see if there are any relations stored in the many to many relationship
# --if there are, then load them normally, and if not change the value to a frrv-ext tag and store the external ID for the
# --ajax request to process properly
allReferences = formRVAL.record_reference.all()
refCount = allReferences.count()
if refCount > 0:
if allReferences:
#we need to store a list of its references--it's a manytomany relationship
#A comma should be sufficient to separate them, but to be safe--we'll make our delimiter a ^,^
#-- we also need to provide the formtype pk value for the link
listOfRefs = ""
for rec in allReferences:
listOfRefs += str(rec) + '|^|' + str(rec.form_type.pk) + '|^|' + str(rec.pk) + "^,^"
#remove the last delimiter
listOfRefs = listOfRefs[0:-3]
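#e.g. a hypothetical two-reference value encodes as "Form A|^|12|^|345^,^Form B|^|12|^|346"
#--that is, <label>|^|<formtype pk>|^|<form pk> per record, with records joined by ^,^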
rowList.append((rtype[0],'frrv',listOfRefs, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-ext for the AJAX callable
rowList.append((rtype[0],'frrv-ext',formRVAL.external_key_reference, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-null for the AJAX callable
rowList.append((rtype[0],'frrv-null',"", ""))
print >>sys.stderr, "TIMER Z"+ " : " + str(time.clock())
#sort the new combined reference and attribute type list
rowList = sorted(rowList, key=lambda att: att[0])
# print >> sys.stderr, str(rowList)
#Now let's handle the thumbnail bit of business for the query
#--If the current form IS a media type already, then use itself to grab the thumbnail URI
if aForm.form_type.type == 1:
thumbnailURI = aForm.get_thumbnail_type()
else:
#let's find the first media type in the order but offer a default to "NO PREVIEW" if not found
thumbnailURI = staticfiles_storage.url("/static/site-images/no-thumb-missing.png")
for record in rowList:
#if it's a reference
if record[1] == 'frrv' or record[1] == 'frrv-ext':
currentRTYPE = FormRecordReferenceValue.objects.get(pk=int(record[3]))
#if it's not a NoneType reference:
if currentRTYPE.record_reference_type.form_type_reference != None:
#If its a reference to a media type
if currentRTYPE.record_reference_type.form_type_reference.type == 1:
print >> sys.stderr, "WE GOT A MATCH"
#Because a form record reference value is a ManyToMany relationship, we just grab the first one in the list
#TODO this may need to be edited later--because you can't order the selections. I may add another ForeignKey called
#"Thumbnail Reference" which links to a single relation to a form of a media type--this would also
#probably solve the complexity of looping through to grab it as it stands right now
#****WE also have to check for NULL references
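#A minimal sketch of that TODO (hypothetical field--not in the current models):
#    thumbnail_reference = models.ForeignKey(Form, null=True, blank=True)
#--a single nullable FK on FormRecordReferenceValue would replace the scan below with one direct lookup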
if currentRTYPE.record_reference.all().count() > 0:
thumbnailURI = currentRTYPE.record_reference.all()[0].get_thumbnail_type()
break
#we only want the first 5 values from the final ordered list of attributes
rowList = rowList[0:5]
formList.append([thumbnailURI,str(aForm.pk), aForm, rowList])
print >>sys.stderr, "TIMER ZZ"+ " : " + str(time.clock())
form_list = formList
#update our progress bar
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"90","is_complete":"False","stats":"none"}'
progressData.save()
finishedJSONquery = {}
headerList=[]
for rtype in queryRTYPElist:
rtypeDict = {}
rtypeDict["index"] = rtype[0]
rtypeDict["rtype"] = rtype[1]
rtypeDict["pk"] = rtype[2]
rtypeDict["name"] = rtype[3]
headerList.append(rtypeDict)
#update our progress bar
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"93","is_complete":"False","stats":"none"}'
progressData.save()
finishedJSONquery["rtype_header"] = headerList
allFormList = []
counter = 0
total = len(formList)
for form in formList:
#update our progress bar
counter += 1
currentPercent = 93 + int((counter*(5.0/total)))
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"'+str(currentPercent)+'","is_complete":"False","stats":"none"}'
progressData.save()
formDict = {}
formDict["thumbnail_URI"] = form[0]
formDict["pk"] = form[1]
if formtype.is_hierarchical: formDict["form_id"] = form[2].get_hierarchy_label()
else: formDict["form_id"] = form[2].form_name
formRVALS = []
for rval in form[3]:
rvalDict = {}
rvalDict["index"] = rval[0]
rvalDict["rtype"] = rval[1]
rvalDict["value"] = rval[2]
rvalDict["pk"] = rval[3]
formRVALS.append(rvalDict)
formDict["rvals"] = formRVALS
allFormList.append(formDict)
finishedJSONquery["form_list"] = allFormList
finishedJSONquery["formtype"] = formtype.form_type_name
finishedJSONquery["formtype_pk"] = formtype.pk
finishedJSONquery["project_pk"] = request.user.permissions.project.pk
finishedJSONquery["project"] = request.user.permissions.project.name
finishedJSONquery["pagination_page"] = requestedPageNumber
finishedJSONquery["resultsCount"] = masterQueryCount
finishedJSONquery["currentQuery"] = request.POST['currentQueryJSON']
#save our stats to the returned JSON
#convert to JSON
finishedJSONquery = json.dumps(finishedJSONquery)
#Update our progress bar
progressData.jsonString = '{"message":"Finished!","current_query":"","current_term":"","percent_done":"100","is_complete":"True","stats":"none"}'
progressData.save()
print >>sys.stderr, "Timer End"
return HttpResponse(finishedJSONquery, content_type="application/json")
ERROR_MESSAGE += "Error: You don't have permission to access this FormType from another project"
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access querying this project"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 RUN_MASTER_QUERY_ENGINE() *Recycling
#=======================================================#
def run_master_query_engine(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
# This is the real magic of the database in terms of non-geospatial data. This Query engine takes complicated input from json POST data
# --and runs it through a long complex Django filter series to perform 1 of 2 tasks--the first is to produce a long set of counts in their
# --given search parameters in order to generate several graphs/charts of the data. The second function is to actually produce a list of
# --forms from the provided parameters to inspect and bulk edit.
#
# This takes 3 layers of parameters:
# *The main query, which produces the form results, and has complex search options and AND/OR statements
# *The option constraints query, which acts as an additional parameter when looking for deep counts with a comparison
#            *The primary constraints query, which acts as a further nested constraint on the previous 2
#            --Essentially, each parameter is an axis of a graph or dimension--each new parameter adds another dimension to that axis. It's more obviously
#            --apparent when actually seeing the results of a query
#
#            There is a tremendous amount of code--which could probably be reduced in line count and size, but it was my first major foray into Django's
#            --query engine, so no doubt there are probably redundant lines. It's a bit complex because I needed 3 layers of parameters, and also needed
#            --the ability to perform queries when those parameters included relations. I had spent some time looking into nested functions to help deal with
#            --what felt like a lot of boilerplate for each section, but--I couldn't figure it out. It works--and I need to move on to other pastures with
# --the project for now.
#
# SPEED: I spent a great deal of time looking for alternative ways to speed up the queries behind this--it does take time. I haven't had a query
# --take longer than a minute, but the potential is there. A minute isn't long in the grand scheme of things, but still. The time it takes to query
# --also depends upon how many forms are part of the query-e.g. the test case of Ceramics in the AL-Hiba project has roughly 110,000 individual forms.
# --A formtype with only 5000 forms wouldn't take time at all to process in comparison. The speed loss comes with nested queries(MYSQL doesn't like these)
# --as well as INNER JOINS when dealing with the relations. I was able to cut the time in half from the first iteration--which is significant, but there
# --are probably other ways I can increase the speed further still. TODO: One option to try is to grab a value list of PKs to submit to another query
# --rather than chaining 2 querysets together(which causes an INNER JOIN in SQL) I tentatively tried this before--but without much success. I know
# --what I'm doing far more now and it's worth trying out again in the future, but for now--this works, and provides user feedback to keep them
# --updated with the goings on behind the curtain.
#
# TODO: I've also moved this into an API Endpoint rather than as a process of the view itself. There may be some strange code decisions left in here
# --as a function of that transition
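# A minimal sketch of the PK-list idea from the SPEED note above (illustrative only--it is never called, and the names are hypothetical):
def _example_pk_list_lookup(deep_form_type, queried_forms, deep_pk, tval):
#Step 1: resolve the matching related forms' pks as a flat Python list (a single query)
matching_pks = list(deep_form_type.form_set.filter(formrecordattributevalue__record_value__contains=tval, formrecordattributevalue__record_attribute_type__pk=deep_pk).values_list('pk', flat=True))
#Step 2: filter the working queryset by pk membership rather than chaining querysets, avoiding the INNER JOIN across the relation
return queried_forms.filter(ref_to_parent_form__record_reference__pk__in=matching_pks)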
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
print >>sys.stderr, request.POST
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"message":"Loading Json","current_query":"","current_term":"","percent_done":"0","is_complete":"False"}')
progressData.save()
#We need to loop through EACH project query in the JSON header and create a separate results box for each one
masterProjectQuery = json.loads(request.POST['master_query'])
masterQueryResults = {}
all_project_queries = []
masterQueryResults['final_query_set'] = all_project_queries
query_set = masterProjectQuery['query_list']
globalPercentage = 0
queryPercentage = 0
queryPercentageIncrement = 100 / len(query_set)
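#Note: under Python 2 this is integer division, so the increment is a whole number (e.g. 3 queries -> 33, not 33.33)--coarse but fine for a progress readout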
queryPercentageCounter = 0
for query in query_set:
print >>sys.stderr, "Starting a query?"
# PROGRESS REPORT *****************************************
#Setup our percentage monitors for the AJAX progress report
queryPercentage = (queryPercentageCounter * queryPercentageIncrement)
queryPercentageCounter += 1
globalPercentage = queryPercentage
progressData.jsonString = '{"message":"Performing Query","current_query":"'+query['project_label']+' : '+query['formtype_label']+'","current_term":"","percent_done":"'+str(globalPercentage)+'","is_complete":"False"}'
progressData.save()
#**********************************************************
queryProject = FormProject.objects.get(pk=query['project_pk'])
queryFormtype = FormType.objects.get(pk=query['formtype_pk'])
#If we are querying a project different than the user and it is NOT set to public, then throw an error because it should be private
if (queryProject.pk != request.user.permissions.project.pk and queryProject.is_public == False) or (queryFormtype.project.pk != request.user.permissions.project.pk and queryFormtype.is_public == False):
ERROR_MESSAGE += "Error: You are trying to access a project or formtype that doesn't exist or access is not allowed. This has been logged to the network administrator"
#Delete Our progress object
print >>sys.stderr, "Hmmm are we exiting here?"
progressData.delete()
#break the loop and return the security message
break
#Otherwise continue
else:
#create a dictionary to store the query statistics
queryStats = {}
queryStats['formtype'] = query['formtype_label']
queryStats['formtype_pk'] = query['formtype_pk']
queryStats['project'] = query['project_label']
queryStats['project_pk'] = query['project_pk']
queryList = []
queryStats['query_list'] = queryList
primaryConstraintList = []
print >>sys.stderr, queryStats['project_pk'] + " : " + query['project_pk']
#First let's setup our header field of ordered labels
print >>sys.stderr, "Timer Start"
form_att_type_list = []
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for attType in queryFormtype.formrecordattributetype_set.all().filter(flagged_for_deletion=False).order_by('order_number'):
form_att_type_list.append((attType.order_number,'frat',attType.pk,attType.record_type))
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for refType in queryFormtype.ref_to_parent_formtype.all().filter(flagged_for_deletion=False).order_by('order_number'):
form_att_type_list.append((refType.order_number,'frrt',refType.pk,refType.record_type))
#sort the new combined reference and attribute type list
form_att_type_list = sorted(form_att_type_list, key=lambda att: att[0])
#we only want the first 5 types
#form_att_type_list = form_att_type_list[0:5]
#Finally let's organize all of our reference and attribute values to match their provided order number
formList = []
#Setup our inital queryset that includes all forms
masterQuery = queryFormtype.form_set.all().filter(flagged_for_deletion=False)
#Setup a list to hold the attribute types from the query. We want to show the record types that are part of the search terms,
# --rather than the default types that are in order. If there are less than 5 query record types, use the ordered record type list
# --until 5 are met.
queryRTYPElist = []
uniqueRTYPES = []
rtypeCounter = 1
#Load the JSON query from POST
for term in query['terms']:
print >>sys.stderr, query
#setup a dictionary of key values of the query stats to add to the main querystas dictionary later
singleQueryStats = {}
queriedForms = masterQuery
#***RECYCLING BIN*** Make sure our Forms are filtered by their deletion flag
uniqueQuery = False
#Let's not allow any duplicate rtypes in the query rtype list header e.g. we don't want "Object ID" to show up 4 times
#--if the user makes a query that compares it 4 times in 4 separate queries
if (term['pk']+ '_' +term['RTYPE']) not in uniqueRTYPES:
uniqueRTYPES.append((term['pk']+ '_' +term['RTYPE']))
uniqueQuery = True
#We need to check whether or not this query is an AND/OR or a null,e.g. the first one(so there is no and/or)
rtype = term['RTYPE']
rtypePK = term['pk']
print >>sys.stderr, rtype + " : <!----------------------------------------------------------------"
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
# (FRAT) FormRecordAttributeType Lookups
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
if rtype == 'FRAT':
#store the record type in a new rtype list if unique
if uniqueQuery: queryRTYPElist.append((rtypeCounter,'frat',rtypePK,term['LABEL']))
rtypeCounter += 1
tCounter = 0
#store stats
singleQueryStats['rtype_name'] = term['LABEL']
singleQueryStats['rtype_pk'] = rtypePK
singleQueryStats['rtype'] = rtype
termStats = []
singleQueryStats['all_terms'] = termStats
logging.info("TimerA"+ " : " + str(time.clock()))
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
if term['ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK)#IS_NULL
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (queryFormtype.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#CONTAINS
elif term['QCODE'] == '1': newQuery = (queryFormtype.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (queryFormtype.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (queryFormtype.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (queryFormtype.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK))#IS_NULL
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
logging.info("TimerB"+ " : " + str(time.clock()))
logging.info("TimerC"+ " : " + str(time.clock()))
#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
# (FRRT) FormRecordReferenceType Lookups
# This is where things can get complicated. I've added a 'deep' search -- or the ability to search fields from a related model
# --Right now, this just looks at the form IDs of the related field and looks for matches--it will still need to do that, but
# --it also needs to be able to look up FRATs or FRRTs in the same field--that will essentially double the code for this block
# --to do all of this, and will also cause the time of the query to significantly increase because we are doing another JOIN in the
# --SQL lookup to span this relationship. This won't affect the list of queried forms directly--they will be limited by what the
# --query finds obviously--but the user will only see the column for the related FRRT that had a match--not specifically the field that matched
# ----It WILL affect the counts for the graphs etc.
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
elif rtype == 'FRRT':
#store the record type in a new rtype list if unique
if uniqueQuery: queryRTYPElist.append((rtypeCounter,'frrt',rtypePK,term['LABEL']))
rtypeCounter += 1
tCounter = 0
#store stats
singleQueryStats['rtype_name'] = term['LABEL']
singleQueryStats['rtype_pk'] = rtypePK
singleQueryStats['rtype'] = rtype
termStats = []
singleQueryStats['all_terms'] = termStats
#get the deep values
deepPK, deepRTYPE = term['RTYPE-DEEP'].split('__')
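#e.g. a hypothetical term['RTYPE-DEEP'] of "42__FRAT" splits into deepPK = "42" and deepRTYPE = "FRAT"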
print >>sys.stderr, deepPK + " : " + deepRTYPE + " <!-------------------------------------------"
#==========================================================================================================================================================================================
# IF WE ARE JUST LOOKING UP THE RTYPE FORM ID
#==========================================================================================================================================================================================
#TODO: This also needs to check external reference values if no match is found
if deepRTYPE == 'FORMID':
print >> sys.stderr, "WTF"
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
if term['ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK) #IS_NULL
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#CONTAINS
elif term['QCODE'] == '1': newQuery = (queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (queryFormtype.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK))#IS_NULL
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#==========================================================================================================================================================================================
# IF WE ARE LOOKING UP THE RELATION'S FRAT
#==========================================================================================================================================================================================
elif deepRTYPE == 'FRAT':
print >>sys.stderr, "We should be here"
#grab the formtype in question
deepFormType = FormType.objects.filter(pk=FormRecordAttributeType.objects.get(pk=deepPK).form_type.pk)
#***RECYCLING BIN*** Make sure this Deep query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
#----------------------------------------------------------
# AND STATEMENT FOR A --TERM--
if term['ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
#First we Get a flattened list of form pk values from the deepFormType
#Then we filter our current formtype queryset's frrt manytomany pks by the pk value list just created
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #CONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #ICONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #MATCHES EXACT
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #EXCLUDES
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #IS NULL
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
#--------------------------------------------------------
# OR STATEMENT FOR a --TERM--
else:
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #CONTAINS
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #ICONTAINS
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #MATCHES EXACT
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #EXCLUDES
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #IS NULL
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#==========================================================================================================================================================================================
# IF WE ARE LOOKING UP THE RELATION'S FRRT(Only form ID allowed)
#==========================================================================================================================================================================================
elif deepRTYPE == 'FRRT':
print >>sys.stderr, "We should be here 3"
#grab the formtype in question
deepFormType = FormType.objects.filter(pk=FormRecordReferenceType.objects.get(pk=deepPK).form_type_parent.pk)
#***RECYCLING BIN*** Make sure this Deep query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
#----------------------------------------------------------
# AND STATEMENT FOR A --TERM--
if term['ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
#First we Get a flattened list of form pk values from the deepFormType
#Then we filter our current formtype queryset's frrt manytomany pks by the pk value list just created
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #CONTAINS
print >>sys.stderr, "LOOK HERE ROBERT"
print >>sys.stderr, flattenedSet
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL']).values_list('pk', flat=True)) #ICONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL']).values_list('pk', flat=True)) #EXACT MATCH
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #EXCLUDES
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS NULL
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
#--------------------------------------------------------
# OR STATEMENT FOR a --TERM--
else:
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #CONTAINS
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL']).values_list('pk', flat=True)) #ICONTAINS
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL']).values_list('pk', flat=True)) #EXACT MATCH
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #EXCLUDES
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS NULL
newQuery = queryFormtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
# (Form ID) Lookups
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
elif rtype == "FORMID":
tCounter = 0
#store stats
singleQueryStats['rtype_name'] = term['LABEL']
singleQueryStats['rtype_pk'] = rtypePK
singleQueryStats['rtype'] = rtype
termStats = []
singleQueryStats['all_terms'] = termStats
logging.info("TimerD"+ " : " + str(time.clock()))
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
print >>sys.stderr, str(queryFormtype.form_set.all().filter(form_name__contains=term['TVAL']))
if term['ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
print >> sys.stderr, "Is it working?"
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(form_name__contains=term['TVAL']) #CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(form_name__icontains=term['TVAL']) #ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(form_name__exact=term['TVAL'])#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(form_name__contains=term['TVAL'])#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(form_name__isnull=True) #IS_NULL
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (queryFormtype.form_set.all().filter(form_name__contains=term['TVAL']))#CONTAINS
elif term['QCODE'] == '1': newQuery = (queryFormtype.form_set.all().filter(form_name__icontains=term['TVAL']))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (queryFormtype.form_set.all().filter(form_name__exact=term['TVAL']))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (queryFormtype.form_set.all().exclude(form_name__contains=term['TVAL']))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (queryFormtype.form_set.all().filter(form_name__isnull=True))#IS_NULL
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
queryList.append(singleQueryStats)
masterQuery = queriedForms
singleQueryStats['ANDOR'] = term['ANDOR']
singleQueryStats['count'] = masterQuery.count()
queryStats['count'] = singleQueryStats['count']
#Send a message to our AJAX request object
progressData.jsonString = '{"message":"Running raw SQL","current_query":"","current_term":"''","percent_done":"50","is_complete":"False"}'
progressData.save()
jsonStats = json.dumps(queryStats)
#Send a message to our AJAX request object
progressData.jsonString = '{"message":"Loading Queried Forms & Sending generated stats now...","current_query":"","current_term":"''","percent_done":"60","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
#Now make sure our final queried list has distinct values--merging querysets has a tendency to create duplicates
masterQuery = masterQuery.distinct()
#***RECYCLING BIN*** A Final redundant recycling bin filter--just to be safe
masterQuery = masterQuery.filter(flagged_for_deletion=False)
#We need to check the # of rtypes in our header list now--if it's less than 5, then let's add from the ordered list
#We also need to make sure we aren't adding duplicates of the RTYPES, e.g. if we're looking for a match under "Object Number" and Object Number is already
#--in our sorted order-num list--let's not re-add it.
for attType in form_att_type_list:
print >>sys.stderr, "AttTypeList: " + str(attType)
matchfound = False
for queryAttType in queryRTYPElist:
if attType[2] == queryAttType[2]:
matchfound = True
if matchfound == False:
#let's arbitrarily add '100' to the order number so that our queries are definitely in front of these
queryRTYPElist.append((attType[0] + 100,attType[1],attType[2],attType[3]))
for q in queryRTYPElist:
print >>sys.stderr, "QTypeList: " + str(q)
#serializeTest = serializers.serialize("json", masterQuery)
queryCounter = 0
logging.info("TEST A")
total = queryStats['count']
paginationTotal = total
logging.info("TEST A END")
print >>sys.stderr, "TIMER HOHOHOHOOHOHOHO START"
#We need to grab ALL the form pk values in a similarly sorted list
paginationQuery = masterQuery.order_by('sort_index')
paginationFormList = []
if paginationQuery:
for form in paginationQuery:
paginationFormList.append(form.pk)
#print >>sys.stderr, paginationFormList
print >>sys.stderr, "TIMER HOHOHOHOOHOHOHO END"
#-----------------------------------------------------------------------------------------------------------
# Here we need to determine whether or not the form type being queried is hierarchical.
# --If it is hierarchical, then we just organize the masterQuery and sort it with the hierarchy in mind
# --as well as with its hierarchical labels--otherwise just perform a normal sort by its label
if queryFormtype.is_hierarchical:
global hierarchyFormList
hierarchyFormList = []
#Finally let's organize all of our reference and attribute values to match their provided order number
#We want to find all the forms that have no parent element first--these are the top of the nodes
#Then we'll organize the forms by hierarchy--which can then be put through the normal ordered query
masterQuery = masterQuery.filter(hierarchy_parent=None).exclude(form_number=None, form_name=None)[:25]
#CACHE -- this caches the query for the loop
if masterQuery:
for aForm in masterQuery:
queryCounter += 1
Qpercent = ( queryCounter * (30/(total*1.0)))
finalPercent = (60 + int(Qpercent))
progressData.jsonString = '{"SQL":"True","message":"Loading Queried Forms!","current_query":"'+ str(queryCounter) +'","current_term":"'+ str(total) +'","percent_done":"' + str(finalPercent) + '","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
logging.info(aForm.form_name)
hierarchyFormList.append(aForm)
#Make a recursive function to search through all children
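#--find_children does a preorder depth-first walk: each child is appended before its own children are
#--visited, so hierarchyFormList ends up in parent-before-children order. There is no cycle guard, so a
#--malformed hierarchy that loops back on itself would recurse until Python's recursion limit.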
def find_children(currentParentForm):
global hierarchyFormList
for currentChild in currentParentForm.form_set.all():
hierarchyFormList.append(currentChild)
find_children(currentChild)
find_children(aForm)
#reset our masterQuery to our new hierarchical list!
masterQuery = hierarchyFormList
else:
#sort the formlist by their sort_index
masterQuery = masterQuery.order_by('sort_index')[:25]
#print >>sys.stderr, masterQuery
#CACHE -- This caches the query before looping through it
if masterQuery:
for aForm in masterQuery:
queryCounter += 1
Qpercent = ( queryCounter * (30/(total*1.0)))
finalPercent = (60 + int(Qpercent))
progressData.jsonString = '{"SQL":"True","message":"Loading Queried Forms!","current_query":"'+ str(queryCounter) +'","current_term":"'+ str(total) +'","percent_done":"' + str(finalPercent) + '","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
print >>sys.stderr, str(aForm.pk) + ": <!-- Current Form Pk"
rowList = []
#Let's loop through each item in the queryRTYPE list and match up the frav's in each queried form so the headers match the form attribute values
for rtype in queryRTYPElist:
if rtype[1] == 'frat':
print >>sys.stderr, str(rtype[2]) + ' ' + str(aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2]).count())
formRVAL = aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2])
#We need to check for NULL FRAV's here. When a user manually creates new forms, they don't always have FRAVS created for them if they leave it blank
if formRVAL.exists():
rowList.append((rtype[0],'frav',formRVAL[0].record_value, formRVAL[0].pk))
else:
print >>sys.stderr, "Whoops--something happened. There are no RVALS for 'frats' using: " + str(rtype[2])
#If there isn't an RVAL for this RTYPE then make a new one and return it instead
newFRAV = FormRecordAttributeValue()
newFRAV.record_attribute_type = FormRecordAttributeType.objects.get(pk=rtype[2])
newFRAV.form_parent = aForm
newFRAV.project = aForm.project
newFRAV.record_value = ""
newFRAV.save()
rowList.append((rtype[0],'frav',newFRAV.record_value, newFRAV.pk))
else:
print >>sys.stderr, aForm.ref_to_parent_form.all().count()
print >>sys.stderr, aForm.pk
for frrt in aForm.ref_to_parent_form.all():
print >>sys.stderr, "" + str(frrt.pk)
formRVAL = aForm.ref_to_parent_form.all().filter(record_reference_type__pk=rtype[2])
if formRVAL.exists():
formRVAL = formRVAL[0]
#First check to see if there are any relations stored in the many to many relationship
# --if there are, then load them normally, and if not change the value to a frrv-ext tag and store the external ID for the
# --ajax request to process properly
if formRVAL.record_reference.all().count() > 0:
#we need to store a list of its references--it's a manytomany relationship
#A comma should be sufficient to separate them, but to be safe--we'll make our delimiter a ^,^
#-- we also need to provide the formtype pk value for the link
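#--Illustrative result (labels and pks hypothetical): two referenced forms serialize as
#--"Sherd 0012|^|4|^|889^,^Sherd 0013|^|4|^|890", i.e. label|^|formtype_pk|^|form_pk joined by ^,^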
listOfRefs = ""
for rec in formRVAL.record_reference.all():
listOfRefs += str(rec) + '|^|' + str(rec.form_type.pk) + '|^|' + str(rec.pk) + "^,^"
#remove the last delimiter
listOfRefs = listOfRefs[0:-3]
rowList.append((rtype[0],'frrv',listOfRefs, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-ext for the AJAX callable
rowList.append((rtype[0],'frrv-ext',formRVAL.external_key_reference, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-null for the AJAX callable
rowList.append((rtype[0],'frrv-null',"", ""))
#sort the newly combined reference and attribute type list
rowList = sorted(rowList, key=lambda att: att[0])
print >> sys.stderr, str(rowList)
#Now let's handle the thumbnail bit of business for the query
#--If the current form IS a media type already, then use itself to grab the thumbnail URI
if aForm.form_type.type == 1:
thumbnailURI = aForm.get_thumbnail_type()
else:
#let's find the first media type in the order but offer a default to "NO PREVIEW" if not found
thumbnailURI = staticfiles_storage.url("/static/site-images/no-thumb-missing.png")
for record in rowList:
#if it's a reference
if record[1] == 'frrv' or record[1] == 'frrv-ext':
currentRTYPE = FormRecordReferenceValue.objects.get(pk=int(record[3]))
#if it's not a NoneType reference:
if currentRTYPE.record_reference_type.form_type_reference != None:
#If its a reference to a media type
if currentRTYPE.record_reference_type.form_type_reference.type == 1:
print >> sys.stderr, "WE GOT A MATCH"
#Because a form record reference value is a ManyToMany relationship, we just grab the first one in the list
#TODO this may need to be edited later--because you can't order the selections. I may add another ForeignKey called
#"Thumbnail Reference" which links to a single relation to a form of a media type--this would also
#probably solve the complexity of looping through to grab it as it stands right now
#****WE also have to check for NULL references
if currentRTYPE.record_reference.all().count() > 0:
thumbnailURI = currentRTYPE.record_reference.all()[0].get_thumbnail_type()
break
#we only want the first 5 values from the final ordered list of attributes
#rowList = rowList[0:5]
formList.append([thumbnailURI,str(aForm.pk), aForm, rowList])
form_list = formList
#update our progress bar
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"90","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
finishedJSONquery = {}
headerList=[]
for rtype in queryRTYPElist:
rtypeDict = {}
rtypeDict["index"] = rtype[0]
rtypeDict["rtype"] = rtype[1]
rtypeDict["pk"] = rtype[2]
rtypeDict["name"] = rtype[3]
headerList.append(rtypeDict)
#update our progress bar
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"93","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
finishedJSONquery["rtype_header"] = headerList
allFormList = []
counter = 0
total = len(formList)
for form in formList:
#update our progress bar
counter += 1
currentPercent = 93 + int((counter*(5.0/total)))
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"'+str(currentPercent)+'","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
formDict = {}
formDict["thumbnail_URI"] = form[0]
formDict["pk"] = form[1]
if queryFormtype.is_hierarchical: formDict["form_id"] = form[2].get_hierarchy_label()
else: formDict["form_id"] = form[2].form_name
formRVALS = []
for rval in form[3]:
rvalDict = {}
rvalDict["index"] = rval[0]
rvalDict["rtype"] = rval[1]
rvalDict["value"] = rval[2]
rvalDict["pk"] = rval[3]
formRVALS.append(rvalDict)
formDict["rvals"] = formRVALS
allFormList.append(formDict)
finishedJSONquery["form_list"] = allFormList
finishedJSONquery["currentQuery"] = request.POST['master_query']
finishedJSONquery["totalResultCount"] = paginationTotal
finishedJSONquery['formtype'] = query['formtype_label']
finishedJSONquery['formtype_pk'] = query['formtype_pk']
finishedJSONquery['project'] = query['project_label']
finishedJSONquery['project_pk'] = query['project_pk']
finishedJSONquery['pagination_form_list'] = paginationFormList
finishedJSONquery['query_stats'] = queryStats
all_project_queries.append(finishedJSONquery)
#convert to JSON
all_project_queries = json.dumps(all_project_queries)
#Update our progress bar
progressData.jsonString = '{"message":"Finished!","current_query":"","current_term":"","percent_done":"100","is_complete":"True","stats":''}'
progressData.save()
print >>sys.stderr, "Timer End"
return HttpResponse(all_project_queries, content_type="application/json")
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access querying this project"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 RUN_QUERY_ENGINE() *Recycling
#=======================================================#
def run_query_engine(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
# This is the real magic of the database in terms of non-geospatial data. This Query engine takes complicated input from json POST data
# --and runs it through a long, complex series of Django filters to perform 1 of 2 tasks--the first is to produce a set of counts for the
# --given search parameters in order to generate several graphs/charts of the data. The second function is to actually produce a list of
# --forms from the provided parameters to inspect and bulk edit.
#
# This takes 3 layers of parameters:
# *The main query, which produces the form results, and has complex search options and AND/OR statements
# *The option constraints query, which acts as an additional parameter when looking for deep counts with a comparison
# *The primary constraints query, which acts as a further nested constraint on the previous 2
# --Essentially, each parameter is an axis or dimension of a graph--each new parameter adds another dimension to that axis. It's more obviously
# --apparent when actually seeing the results of a query
#
# There is a tremendous amount of code--which could probably be reduced in line count and size, but it was my first major foray into Django's
# --query engine, so no doubt there are probably redundant lines. It's a bit complex because I needed 3 layers of parameters, and also needed
# --the ability to perform queries when those parameters included relations. I had spent some time looking into nested functions to help deal with
# --what felt like a lot of boilerplate for each section, but--I couldn't figure it out. It works--and I need to move on to other pastures with
# --the project for now.
#
# SPEED: I spent a great deal of time looking for alternative ways to speed up the queries behind this--it does take time. I haven't had a query
# --take longer than a minute, but the potential is there. A minute isn't long in the grand scheme of things, but still. The time it takes to query
# --also depends upon how many forms are part of the query--e.g. the test case of Ceramics in the AL-Hiba project has roughly 110,000 individual forms.
# --A formtype with only 5000 forms would take hardly any time to process in comparison. The speed loss comes with nested queries (MySQL doesn't like these)
# --as well as INNER JOINS when dealing with the relations. I was able to cut the time in half from the first iteration--which is significant, but there
# --are probably other ways I can increase the speed further still. TODO: One option to try is to grab a value list of PKs to submit to another query
# --rather than chaining 2 querysets together (which causes an INNER JOIN in SQL). I tentatively tried this before--but without much success. I know
# --what I'm doing far more now and it's worth trying out again in the future, but for now--this works, and provides user feedback to keep them
# --updated with the goings on behind the curtain.
#
# TODO: I've also moved this into an API Endpoint rather than as a process of the view itself. There may be some strange code decisions left in here
# --as a function of that transition
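#
# For orientation, a minimal sketch of the POST 'query' payload this engine parses--the field names are
# --taken from the parsing code below, while the pk values and labels are hypothetical:
#
#   {"query_list": {"0": {"RTYPE": "FRAT-12",                # FRAT/FRRT/FORMID + record type pk
#                         "LABEL": "Object Number",
#                         "RTYPE-DEEP": "FORMID-0",          # deep lookup target (FRRT queries only)
#                         "DEEP-LABEL": "",
#                         "Q-ANDOR": "",                     # how this query joins the master query
#                         "TERMS": [{"TVAL": "AH-99", "QCODE": "0", "T-ANDOR": ""}]}},
#    "constraint_list": {...},                               # same TVAL/QCODE shape; counted, not returned
#    "primary_constraints": {...}}                           # optional further-nested counts
#
# QCODE values, as dispatched below: 0=contains, 1=icontains, 2=exact match, 3=excludes, 4=is null.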
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
if request.method == 'POST':
#We need to make sure we have permission to deal with the formtype--e.g. it's part of the user's current project
formtype = FormType.objects.get(pk=request.POST['formtype_id'])
#If the project IDs match, then we're good to go!
if formtype.project.pk == request.user.permissions.project.pk:
#Make the AJAX Request Data Model for subsequent AJAX calls
progressData = AJAXRequestData(uuid=request.POST.get('uuid'), jsonString='{"message":"Loading Json","current_query":"","current_term":"","percent_done":"0","is_complete":"False"}')
progressData.save()
#create a dictionary to store the query statistics
queryStats = {}
queryStats['formtype'] = formtype.form_type_name
queryStats['formtype_pk'] = formtype.pk
queryList = []
queryStats['query_list'] = queryList
primaryConstraintList = []
#First let's setup our header field of ordered labels
print >>sys.stderr, "Timer Start"
form_att_type_list = []
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for attType in formtype.formrecordattributetype_set.all().filter(flagged_for_deletion=False).order_by('order_number')[:5]:
form_att_type_list.append((attType.order_number,'frat',attType.pk,attType.record_type))
#***RECYCLING BIN*** Make sure our RTYPES are filtered by their deletion flags
for refType in formtype.ref_to_parent_formtype.all().filter(flagged_for_deletion=False).order_by('order_number')[:5]:
form_att_type_list.append((refType.order_number,'frrt',refType.pk,refType.record_type))
#sort the newly combined reference and attribute type list
form_att_type_list = sorted(form_att_type_list, key=lambda att: att[0])
#we only want the first 5 types
form_att_type_list = form_att_type_list[0:5]
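#--Each entry is a 4-tuple (order_number, 'frat'|'frrt', rtype_pk, label)--e.g. (1,'frat',12,'Object Number')
#--(pk and label hypothetical); the sorted() call above keys on att[0], the order number.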
#Finally let's organize all of our reference and attribute values to match their provided order number
formList = []
#Setup our inital queryset that includes all forms
masterQuery = formtype.form_set.all()
#Setup a list to hold the attribute types from the query. We want to show the record types that are part of the search terms,
# --rather than the default types that are in order. If there are fewer than 5 query record types, use the ordered record type list
# --until 5 are met.
queryRTYPElist = []
uniqueRTYPES = []
rtypeCounter = 1
#Load the JSON query from POST
masterQueryJSON = json.loads(request.POST['query'])
#Update our progressbar to show we're at 10%
progressData.jsonString = '{"message":"Performing Query","current_query":"","current_term":"","percent_done":"5","is_complete":"False"}'
progressData.save()
#Loop through each separate query
for query in sorted(masterQueryJSON['query_list']):
print >>sys.stderr, query
#setup a dictionary of key values of the query stats to add to the main querystas dictionary later
singleQueryStats = {}
queriedForms = formtype.form_set.all()
#***RECYCLING BIN*** Make sure our Forms are filtered by their deletion flags
queriedForms = queriedForms.filter(flagged_for_deletion=False)
currentJSONQuery = masterQueryJSON['query_list'][query]
uniqueQuery = False
#Let's not allow any duplicate rtypes in the query rtype list header e.g. we don't want "Object ID" to show up 4 times
#--if the user makes a query that compares it 4 times in 4 separate queries
if currentJSONQuery['RTYPE'] not in uniqueRTYPES:
uniqueRTYPES.append(currentJSONQuery['RTYPE'])
uniqueQuery = True
#We need to check whether this query is an AND/OR or null, e.g. the first query (so there is no and/or)
rtype, rtypePK = currentJSONQuery['RTYPE'].split("-")
#store our percentDone variable to update the ajax progress message object
percentDone = 0
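#--Term composition, in brief: an AND term (T-ANDOR != 'or') filters the running queriedForms queryset
#--again, narrowing it; an OR term builds a fresh queryset from formtype.form_set.all() and unions it
#--back in via (newQuery | queriedForms).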
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
# (FRAT) FormRecordAttributeType Lookups
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
if rtype == 'FRAT':
#thisRTYPE = FormRecordAttributeType.objects.get(pk=rtypePK)
#store the record type in a new rtype list if unique
if uniqueQuery: queryRTYPElist.append((rtypeCounter,'frat',rtypePK,currentJSONQuery['LABEL']))
rtypeCounter += 1
tCounter = 0
#store stats
singleQueryStats['rtype_name'] = currentJSONQuery['LABEL']
singleQueryStats['rtype_pk'] = rtypePK
singleQueryStats['rtype'] = rtype
termStats = []
singleQueryStats['all_terms'] = termStats
logging.info("TimerA"+ " : " + str(time.clock()))
for term in currentJSONQuery['TERMS']:
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK)#IS_NULL
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#CONTAINS
elif term['QCODE'] == '1': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (formtype.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (formtype.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK))#IS_NULL
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
logging.info("TimerB"+ " : " + str(time.clock()))
#We'll calculate percent by treating this query section as the 5%-55% band of the progress bar--each completed query advances Qpercent toward 50.
logging.info(rtypeCounter)
logging.info(len(masterQueryJSON['query_list']))
Qpercent = ((rtypeCounter-2) * (50.0/len(masterQueryJSON['query_list'])))
logging.info(Qpercent)
logging.info(len(currentJSONQuery['TERMS']))
percentDone = 5 + Qpercent + (tCounter * (Qpercent / len(currentJSONQuery['TERMS'])) )
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on term: '+term['TVAL']+'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+term['TVAL']+'","percent_done":"'+ str(int(percentDone)) +'","is_complete":"False"}'
progressData.save()
tCounter += 1
logging.info("TimerC"+ " : " + str(time.clock()))
#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
# (FRRT) FormRecordReferenceType Lookups
# This is where things can get complicated. I've added a 'deep' search -- or the ability to search fields from a related model
# --Right now, this just looks at the form IDs of the related field and looks for matches--it will still need to do that, but
# --it also needs to be able to look up FRATs or FRRTs in the same field--that will essentially double the code for this block,
# --and will also cause the time of the query to significantly increase because we are doing another JOIN in the
# --SQL lookup to span this relationship. This won't affect the list of queried forms directly--they will be limited by what the
# --query finds obviously--but the user will only see the column for the related FRRT that had a match--not specifically the field that matched
# ----It WILL affect the counts for the graphs etc.
#########################################$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$
elif rtype == 'FRRT':
#thisRTYPE = FormRecordReferenceType.objects.get(pk=rtypePK)
#store the record type in a new rtype list if unique
if uniqueQuery: queryRTYPElist.append((rtypeCounter,'frrt',rtypePK,currentJSONQuery['LABEL']))
rtypeCounter += 1
tCounter = 0
#store stats
singleQueryStats['rtype_name'] = currentJSONQuery['LABEL'] + currentJSONQuery['DEEP-LABEL']
singleQueryStats['rtype_pk'] = rtypePK
singleQueryStats['rtype'] = rtype
termStats = []
singleQueryStats['all_terms'] = termStats
logging.info("TimerD"+ " : " + str(time.clock()))
#get the deep values
deepRTYPE, deepPK = currentJSONQuery['RTYPE-DEEP'].split('-')
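#--e.g. an RTYPE-DEEP of "FRAT-27" splits into deepRTYPE='FRAT' and deepPK='27' (pk hypothetical)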
for term in currentJSONQuery['TERMS']:
#==========================================================================================================================================================================================
# IF WE ARE JUST LOOKING UP THE RTYPE FORM ID
#==========================================================================================================================================================================================
#TODO: This also needs to check external reference values if no match is found
if deepRTYPE == 'FORMID':
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK) #IS_NULL
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#CONTAINS
elif term['QCODE'] == '1': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (formtype.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (formtype.form_set.all().filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK))#IS_NULL
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#==========================================================================================================================================================================================
# IF WE ARE LOOKING UP THE RELATIONS FRAT
#==========================================================================================================================================================================================
elif deepRTYPE == 'FRAT':
print >>sys.stderr, "We should be here"
#grab the formtype in question
deepFormType = FormType.objects.filter(pk=FormRecordAttributeType.objects.get(pk=deepPK).form_type.pk)
#***RECYCLING BIN*** Make sure this deep-query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
#----------------------------------------------------------
# AND STATEMENT FOR A --TERM--
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
#First we Get a flattened list of form pk values from the deepFormType
#Then we filter our current formtype queryset's frrt manytomany pks by the pk value list just created
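#--The pattern used in every branch below, in miniature: pks = list(deep_qs.values_list('pk', flat=True))
#--followed by queriedForms.filter(ref_to_parent_form__record_reference__pk__in=pks)--trading the SQL JOIN
#--of a chained queryset for an IN clause over a flat pk list.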
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #CONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #ICONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #MATCHES EXACT
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #EXCLUDES
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #IS_NULL
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
#--------------------------------------------------------
# OR STATEMENT FOR a --TERM--
else:
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #CONTAINS
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #ICONTAINS
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #MATCHES EXACT
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=term['TVAL'], formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #EXCLUDES
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=deepPK).values_list('pk', flat=True)) #IS_NULL
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#==========================================================================================================================================================================================
# IF WE ARE LOOKING UP THE RELATION'S FRRT (Only Form ID allowed)
#==========================================================================================================================================================================================
elif deepRTYPE == 'FRRT':
print >>sys.stderr, "We should be here 3"
#grab the formtype in question
deepFormType = FormType.objects.filter(pk=FormRecordReferenceType.objects.get(pk=deepPK).form_type_parent.pk)
#***RECYCLING BIN*** Make sure this deep-query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
#----------------------------------------------------------
# AND STATEMENT FOR A --TERM--
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
#Now let's figure out the QCODE, e.g. contains, match exact etc.
#First we Get a flattened list of form pk values from the deepFormType
#Then we filter our current formtype queryset's frrt manytomany pks by the pk value list just created
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #CONTAINS
print >>sys.stderr, "FRRT deep lookup matched " + str(len(flattenedSet)) + " forms"
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL']).values_list('pk', flat=True)) #ICONTAINS
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL']).values_list('pk', flat=True)) #MATCHES EXACT
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #EXCLUDES
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS NULL
newQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
#--------------------------------------------------------
# OR STATEMENT FOR a --TERM--
else:
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #CONTAINS
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=term['TVAL']).values_list('pk', flat=True)) #ICONTAINS
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=term['TVAL']).values_list('pk', flat=True)) #MATCHES EXACT
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=term['TVAL']).values_list('pk', flat=True)) #EXCLUDES
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif term['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS NULL
newQuery = formtype.form_set.all().filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#We'll calculate percent by treating this query section as the 5%-55% band of the progress bar--each completed query advances Qpercent toward 50.
Qpercent = ((rtypeCounter-2) * (50.0/len(masterQueryJSON['query_list'])))
percentDone = 5 + Qpercent + (tCounter * (Qpercent / len(currentJSONQuery['TERMS'])) )
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on term: '+term['TVAL']+'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+term['TVAL']+'","percent_done":"'+ str(percentDone) +'","is_complete":"False"}'
progressData.save()
tCounter += 1
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
# (Form ID) Lookups
#########################################&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
elif rtype == "FORMID":
tCounter = 0
#store stats
singleQueryStats['rtype_name'] = currentJSONQuery['LABEL']
singleQueryStats['rtype_pk'] = rtypePK
singleQueryStats['rtype'] = rtype
termStats = []
singleQueryStats['all_terms'] = termStats
logging.info("TimerD"+ " : " + str(time.clock()))
for term in currentJSONQuery['TERMS']:
#Now begin modifying the SQL query with each term of each individual query
#skip the term if the field was left blank
if term['TVAL'] != "" or term['QCODE'] == '4':
newQuery = None
print >>sys.stderr, str(formtype.form_set.all().filter(form_name__contains=term['TVAL']))
if term['T-ANDOR'] != 'or':#We can assume it is an AND like addition if it's anything but 'or'
print >> sys.stderr, "Is it working?"
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = queriedForms.filter(form_name__contains=term['TVAL']) #CONTAINS
elif term['QCODE'] == '1': newQuery = queriedForms.filter(form_name__icontains=term['TVAL']) #ICONTAINS
elif term['QCODE'] == '2': newQuery = queriedForms.filter(form_name__exact=term['TVAL'])#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = queriedForms.exclude(form_name__contains=term['TVAL'])#EXCLUDES
elif term['QCODE'] == '4': newQuery = queriedForms.filter(form_name__isnull=True) #IS_NULL
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = newQuery
else:#Otherwise it's an OR statement
#Now let's figure out the QCODE, e.g. contains, match exact etc.
if term['QCODE'] == '0': newQuery = (formtype.form_set.all().filter(form_name__contains=term['TVAL']))#CONTAINS
elif term['QCODE'] == '1': newQuery = (formtype.form_set.all().filter(form_name__icontains=term['TVAL']))#ICONTAINS
elif term['QCODE'] == '2': newQuery = (formtype.form_set.all().filter(form_name__exact=term['TVAL']))#MATCHES EXACT
elif term['QCODE'] == '3': newQuery = (formtype.form_set.all().exclude(form_name__contains=term['TVAL']))#EXCLUDES
elif term['QCODE'] == '4': newQuery = (formtype.form_set.all().filter(form_name__isnull=True))#IS_NULL
#***RECYCLING BIN*** Make sure our NEW query is always filtered by recycling bin flags--All OR statements will need this filter
newQuery = newQuery.filter(flagged_for_deletion=False)
#save stats and query
term['count'] = newQuery.count()
termStats.append(term)
queriedForms = (newQuery | queriedForms)
#We'll calculate percent by treating this query section as the 5%-55% band of the progress bar--each completed query advances Qpercent toward 50.
Qpercent = ((rtypeCounter-2) * (50.0/len(masterQueryJSON['query_list'])))
percentDone = 5 + Qpercent + (tCounter * (Qpercent / len(currentJSONQuery['TERMS'])) )
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on term: '+term['TVAL']+'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+term['TVAL']+'","percent_done":"'+ str(percentDone) +'","is_complete":"False"}'
progressData.save()
tCounter += 1
logging.info("Timer1"+ " : " + str(time.clock()))
#add stats to the query stats
singleQueryStats['ANDOR'] = currentJSONQuery['Q-ANDOR']
singleQueryStats['count'] = queriedForms.count()
logging.info("Timer3"+ " : " + str(time.clock()))
queryList.append(singleQueryStats)
#If this is an AND query--attach it to the masterQuery as so.
if currentJSONQuery['Q-ANDOR'] == 'and':
logging.info("TimerR"+ " : " + str(time.clock()))
masterQuery = (masterQuery & queriedForms)
singleQueryStats['intersections'] = masterQuery.count()
#if this is the last query--go ahead and grab this count for the aggregate query--this saves us from doing another redundant, time-consuming masterQuery.count() later
if rtypeCounter-1 == len(masterQueryJSON['query_list']): queryStats['count'] = singleQueryStats['intersections']
logging.info("TimerU"+ " : " + str(time.clock()) + " : " + str(singleQueryStats['intersections']))
#If it's an OR query, attach it to the masterQuery as an OR statement
elif currentJSONQuery['Q-ANDOR'] == 'or':
logging.info("TimerX"+ " : " + str(time.clock()))
masterQuery = (masterQuery | queriedForms)
singleQueryStats['additions'] = masterQuery.count()
#if this is the last query--go ahead and grab this count for the aggregate query--this saves us from doing another redundant, time-consuming masterQuery.count() later
if rtypeCounter-1 == len(masterQueryJSON['query_list']): queryStats['count'] = singleQueryStats['additions']
logging.info("TimerZZ"+ " : " + str(time.clock()))
#Otherwise its the first, or a single query and should simply replace the masterQuery
#also set the count to this first query so we have one in case there is only one query
else:
print >> sys.stderr, "Master Query assignment??"
masterQuery = queriedForms
queryStats['count'] = singleQueryStats['count']
logging.info("TimerF"+ " : " + str(time.clock()))
#--------------------------------------------------------------------------------------------------------------------
# CONSTRAINTS
#
#Let's add a count for our constraints and some information about the constraints
#These are just used to flesh out more information for graphs, and don't produce queried results
#--Doing it this way will improve the speed of queries significantly, as we don't NEED to get individual database
#--record information for each query--just count()'s -- These will all essentially be 'AND' statements for the query
#--!!!Make sure we are using this specific query's queryset and not the amalgamated masterQuery--otherwise each constraint will be affected
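#--Each constraint contributes one stats dict of the shape (numbers hypothetical):
#--{'count': 42, 'name': 'Ware', 'rtype_pk': '7', 'rtype': 'FRAT', 'qcode': '0', 'tval': 'red slip'}
#--appended to singleQueryStats['constraints'] below; only the count() is ever executed against the database.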
constraints = []
singleQueryStats['constraints'] = constraints
counter = 0
total = len(masterQueryJSON['constraint_list'])
for aConstraint in masterQueryJSON['constraint_list']:
print >>sys.stderr, aConstraint
logging.info("TimerY START" + " : " + str(time.clock()))
constraint = masterQueryJSON['constraint_list'][aConstraint]
#Send our progresss update message
counter += 1
constraintPercentDone = int(percentDone + (counter *(5.0/total)))
progressData.jsonString = '{"message":"Performing Query # '+ str(rtypeCounter-1) + ' on constraint: '+constraint['LABEL']+ ' : ' + constraint['TVAL'] +'","current_query":"'+ currentJSONQuery['RTYPE'] + '","current_term":"'+str(percentDone)+'","percent_done":"'+ str(constraintPercentDone) +'","is_complete":"False"}'
progressData.save()
singleConstraintStat = {}
#Only check if the entry box was filled in--if it's blank then do nothing and ignore it
if constraint['TVAL'] != "" or constraint['QCODE'] == '4':
#Check whether or not it's a frat or frrt
#We don't use an 'else' statement because if someone edits the json before
#sending, we want this to do nothing when it doesn't receive a proper code
rtype, rtypePK = constraint['RTYPE'].split("-")
if rtype == 'FRAT':
logging.info("TimerZ START" + " : " + str(time.clock()))
if constraint['QCODE'] == '0': constraintQuery = queriedForms.filter(pk__in=list(formtype.form_set.all().filter(formrecordattributevalue__record_value__contains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)))
#if constraint['QCODE'] == '0': constraintQuery = (queriedForms & formtype.form_set.all().filter(formrecordattributevalue__record_value__contains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)).count()#CONTAINS
#if constraint['QCODE'] == '0': constraintQuery = queriedForms.filter(formrecordattributevalue__record_value__contains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).count()#CONTAINS
elif constraint['QCODE'] == '1': constraintQuery = queriedForms.filter(formrecordattributevalue__record_value__icontains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#ICONTAINS
elif constraint['QCODE'] == '2': constraintQuery = queriedForms.filter(formrecordattributevalue__record_value__exact=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#MATCHES EXACT
elif constraint['QCODE'] == '3': constraintQuery = queriedForms.exclude(formrecordattributevalue__record_value__icontains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#EXCLUDES
elif constraint['QCODE'] == '4': constraintQuery = queriedForms.filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK)#IS_NULL
logging.info("TimerZ END" + "-- : " + str(time.clock()))
elif rtype == 'FORMID':
if constraint['QCODE'] == '0': constraintQuery = queriedForms.filter(form_name__contains=constraint['TVAL']) #CONTAINS
elif constraint['QCODE'] == '1': constraintQuery = queriedForms.filter(form_name__icontains=constraint['TVAL']) #ICONTAINS
elif constraint['QCODE'] == '2': constraintQuery = queriedForms.filter(form_name__exact=constraint['TVAL'])#MATCHES EXACT
elif constraint['QCODE'] == '3': constraintQuery = queriedForms.exclude(form_name__contains=constraint['TVAL'])#EXCLUDES
elif constraint['QCODE'] == '4': constraintQuery = queriedForms.filter(form_name__isnull=True) #IS_NULL
elif rtype == 'FRRT_ID':
deepFormType = FormType.objects.filter(pk=FormRecordReferenceType.objects.get(pk=rtypePK).form_type_parent.pk)
#***RECYCLING BIN*** Make sure this deep-query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
if constraint['QCODE'] == '0': constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__contains=constraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#CONTAINS
elif constraint['QCODE'] == '1': constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__icontains=constraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#ICONTAINS
elif constraint['QCODE'] == '2': constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__form_name__exact=constraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#MATCHES EXACT
elif constraint['QCODE'] == '3': constraintQuery = queriedForms.exclude(ref_to_parent_form__record_reference__form_name__icontains=constraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#EXCLUDES
elif constraint['QCODE'] == '4': constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK)#IS_NULL
elif rtype == 'DEEP_FRRT':
deepFormType = FormType.objects.filter(pk=FormRecordReferenceType.objects.get(pk=rtypePK).form_type_parent.pk)
#***RECYCLING BIN*** Make sure this deep-query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
if constraint['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=constraint['TVAL']).values_list('pk', flat=True)) #CONTAINS
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=constraint['TVAL']).values_list('pk', flat=True)) #ICONTAINS
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=constraint['TVAL']).values_list('pk', flat=True)) #MATCHES EXACT
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=constraint['TVAL']).values_list('pk', flat=True)) #EXCLUDES
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS_NULL
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif rtype == 'DEEP_FRAT':
deepFormType = FormType.objects.filter(pk=FormRecordAttributeType.objects.get(pk=rtypePK).form_type.pk)
#***RECYCLING BIN*** Make sure this deep-query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
print >>sys.stderr, deepFormType
print >>sys.stderr, rtypePK
if constraint['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #CONTAINS
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #ICONTAINS
print >>sys.stderr, "DEEP_FRAT icontains matched " + str(len(flattenedSet)) + " forms"
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #MATCHES EXACT
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=constraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #EXCLUDES
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif constraint['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #IS_NULL
constraintQuery = queriedForms.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#***RECYCLING BIN*** Make sure our NEW Constraints query is always filtered by recycling bin flags
constraintQuery = constraintQuery.filter(flagged_for_deletion=False)
singleConstraintStat['count'] = constraintQuery.count()
singleConstraintStat['name'] = constraint['LABEL']
singleConstraintStat['rtype_pk'] = rtypePK
singleConstraintStat['rtype'] = rtype
singleConstraintStat['qcode'] = constraint['QCODE']
singleConstraintStat['tval'] = constraint['TVAL']
constraints.append(singleConstraintStat)
logging.info("TimerY END" + "-- : " + str(time.clock()))
#--------------------------------------------------------------------------------------------------------------------
# PRIMARY CONSTRAINTS
#
#Let's add a count for our primary constraints and some information about them
#These are just used to flesh out more information for graphs, and don't produce queried results
#--Doing it this way will improve the speed of queries significantly, as we don't NEED to get individual database
#--record information for each query--just count()'s -- These will all essentially be 'AND' statements for the query
#--!!!Make sure we are using this specific query's queryset and not the amalgamated masterQuery--otherwise each constraint will be affected
#--This also differs from a normal constraint in that a Primary constraint is seen as another dimensional control over the results.
#--This runs within each CONSTRAINT LOOP
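#--Shape of primaryConstraintList after the first constraint pass (values hypothetical):
#--[{'name': 'Period', 'qcode': '0', 'tval': 'ED III', 'data': []}, ...]--one dict per primary
#--constraint; the data[] arrays are then filled out by index on each successive constraint loop.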
pCounter = 0
if 'primary_constraints' in masterQueryJSON:
for aPrimaryConstraint in masterQueryJSON['primary_constraints']:
pConstraint = masterQueryJSON['primary_constraints'][aPrimaryConstraint]
#Only set up and initialize the dictionary on the first loop through the constraints--we won't need them for successive primary constraint loops--they're the same.
#We'll rely on indexing at that point to fill out the data[] array for the constraints
if len(primaryConstraintList) < len(masterQueryJSON['primary_constraints']):
print >>sys.stderr, "NEW PRIMARY CONSTRAINT"
newPConstraint = {}
currentDataList = []
newPConstraint['name'] = pConstraint['LABEL']
newPConstraint['qcode'] = pConstraint['QCODE']
newPConstraint['tval'] = pConstraint['TVAL']
newPConstraint['data'] = currentDataList
primaryConstraintList.append(newPConstraint)
else:
print >>sys.stderr, "OLD PRIMARY CONSTRAINT: "+ str(counter) + " : " + str(pCounter) + str(primaryConstraintList)
currentPConstraint = primaryConstraintList[pCounter]
currentDataList = currentPConstraint['data']
#Only check if the entry box was filled in--if it's blank then do nothing and ignore it
if pConstraint['TVAL'] != "" or pConstraint['QCODE'] == '4':
#Check whether or not it's a frat or frrt
#We don't use an 'else' statement because I want to make sure that if someone edits the json before
#sending, that it will do nothing if it doesn't get the proper code
rtype, rtypePK = pConstraint['RTYPE'].split("-")
if rtype == 'FRAT':
logging.info("TimerKK START" + " : " + str(time.clock()))
if pConstraint['QCODE'] == '0': primaryConstraintQuery = constraintQuery.filter(pk__in=list(formtype.form_set.all().filter(formrecordattributevalue__record_value__contains=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)))
elif pConstraint['QCODE'] == '1': primaryConstraintQuery = constraintQuery.filter(formrecordattributevalue__record_value__icontains=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#ICONTAINS
elif pConstraint['QCODE'] == '2': primaryConstraintQuery = constraintQuery.filter(formrecordattributevalue__record_value__exact=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#MATCHES EXACT
elif pConstraint['QCODE'] == '3': primaryConstraintQuery = constraintQuery.exclude(formrecordattributevalue__record_value__icontains=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK)#EXCLUDES
elif pConstraint['QCODE'] == '4': primaryConstraintQuery = constraintQuery.filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK)#IS_NULL
elif rtype == 'FRRT':
if pConstraint['QCODE'] == '0': primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__form_name__contains=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#CONTAINS
elif pConstraint['QCODE'] == '1': primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__form_name__icontains=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#ICONTAINS
elif pConstraint['QCODE'] == '2': primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__form_name__exact=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#MATCHES EXACT
elif pConstraint['QCODE'] == '3': primaryConstraintQuery = constraintQuery.exclude(ref_to_parent_form__record_reference__form_name__icontains=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#EXCLUDES
elif pConstraint['QCODE'] == '4': primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK)#IS_NULL
logging.info("TimerKK END" + "-- : " + str(time.clock()))
elif rtype == 'FORMID':
if pConstraint['QCODE'] == '0': primaryConstraintQuery = constraintQuery.filter(form_name__contains=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #CONTAINS
elif pConstraint['QCODE'] == '1': primaryConstraintQuery = constraintQuery.filter(form_name__icontains=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK) #ICONTAINS
elif pConstraint['QCODE'] == '2': primaryConstraintQuery = constraintQuery.filter(form_name__exact=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#MATCHES EXACT
elif pConstraint['QCODE'] == '3': primaryConstraintQuery = constraintQuery.exclude(form_name__contains=pConstraint['TVAL'], ref_to_parent_form__record_reference_type__pk=rtypePK)#EXCLUDES
elif pConstraint['QCODE'] == '4': primaryConstraintQuery = constraintQuery.filter(form_name__isnull=True, ref_to_parent_form__record_reference_type__pk=rtypePK) #IS_NULL
elif rtype == 'DEEP_FRRT':
deepFormType = FormType.objects.filter(pk=FormRecordReferenceType.objects.get(pk=rtypePK).form_type_parent.pk)
#***RECYCLING BIN*** Make sure this Deep query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
if pConstraint['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__contains=pConstraint['TVAL']).values_list('pk', flat=True)) #CONTAINS
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__icontains=pConstraint['TVAL']).values_list('pk', flat=True)) #ICONTAINS
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__exact=pConstraint['TVAL']).values_list('pk', flat=True)) #MATCHES EXACT
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(ref_to_parent_form__record_reference__form_name__contains=pConstraint['TVAL']).values_list('pk', flat=True)) #EXCLUDES
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(ref_to_parent_form__record_reference__form_name__isnull=True).values_list('pk', flat=True)) #IS_NULL
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif rtype == 'DEEP_FRAT':
deepFormType = FormType.objects.filter(pk=FormRecordAttributeType.objects.get(pk=rtypePK).form_type.pk)
#***RECYCLING BIN*** Make sure this Deep query FormType is always filtered by recycling bin flags
deepFormType = deepFormType.filter(flagged_for_deletion=False)
deepFormType = deepFormType[0]
print >>sys.stderr, deepFormType
print >>sys.stderr, rtypePK
if pConstraint['QCODE'] == '0':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__contains=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #CONTAINS
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '1':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__icontains=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #ICONTAINS
print >>sys.stderr, "DEEP_FRAT icontains matched: " + str(len(flattenedSet)) + " forms"
print >>sys.stderr, flattenedSet
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '2':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__exact=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #MATCHES EXACT
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '3':
flattenedSet = list(deepFormType.form_set.all().exclude(formrecordattributevalue__record_value__contains=pConstraint['TVAL'], formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #EXCLUDES
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
elif pConstraint['QCODE'] == '4':
flattenedSet = list(deepFormType.form_set.all().filter(formrecordattributevalue__record_value__isnull=True, formrecordattributevalue__record_attribute_type__pk=rtypePK).values_list('pk', flat=True)) #IS_NULL
primaryConstraintQuery = constraintQuery.filter(ref_to_parent_form__record_reference__pk__in=flattenedSet)
#***RECYCLING BIN*** Make sure our NEW Constraints query is always filtered by recycling bin flags
primaryConstraintQuery = primaryConstraintQuery.filter(flagged_for_deletion=False)
newPData = {}
newPData['data_label'] = singleConstraintStat['name'] + ' ' + singleConstraintStat['tval'] +' - ' + singleQueryStats['rtype_name'] + ' ' + singleQueryStats['all_terms'][0]['TVAL']
newPData['group'] = counter
newPData['count'] = primaryConstraintQuery.count()
currentDataList.append(newPData)
pCounter += 1
logging.info("TimerG"+ " : " + str(time.clock()))
#Add any constraints if they exist
if len(primaryConstraintList) != 0:
queryStats['p_constraints'] = primaryConstraintList
#print >>sys.stderr, str(masterQuery)
#Now make sure our final queried list has distinct values--merging querysets has a tendency to create duplicates
masterQuery = masterQuery.distinct()
#***RECYCLING BIN*** A Final redundant recycling bin filter--just to be safe
masterQuery = masterQuery.filter(flagged_for_deletion=False)
#print >>sys.stderr, str(masterQuery)
#Send a message to our AJAX request object
progressData.jsonString = '{"message":"Running raw SQL","current_query":"","current_term":"''","percent_done":"50","is_complete":"False"}'
progressData.save()
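#NOTE (illustrative): every jsonString written to progressData follows this rough schema, which the
#--client-side AJAX poller is assumed to parse:
#--  {"message":"...","current_query":"...","current_term":"...","percent_done":"0-100","is_complete":"True|False"[,"stats":{...}]}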
jsonStats = json.dumps(queryStats)
#Send a message to our AJAX request object
progressData.jsonString = '{"message":"Loading Queried Forms & Sending generated stats now...","current_query":"","current_term":"''","percent_done":"60","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
#We need to check the # of rtypes in our header list now--if it's less than 5, then let's add from the ordered list
#We also need to make sure we aren't adding duplicates of the RTYPES, e.g. if we're looking for a match under "Object Number" and Object Number is already
#--in our sorted order-num list--let's not re-add it.
for attType in form_att_type_list:
print >>sys.stderr, "AttTypeList: " + str(attType)
matchfound = False
for queryAttType in queryRTYPElist:
if attType[2] == queryAttType[2]:
matchfound = True
if not matchfound and len(queryRTYPElist) < 5:
#let's arbitrarily add '100' to the order number so that our queries are definitely in front of these
queryRTYPElist.append((attType[0] + 100,attType[1],attType[2],attType[3]))
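#NOTE (illustrative): queryRTYPElist entries are 4-tuples of (order_number, rtype_kind, rtype_pk, label),
#--e.g. (103, 'frat', 7, 'Object Number') (hypothetical values)--the same layout headerList unpacks later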
for q in queryRTYPElist:
print >>sys.stderr, "QTypeList: " + str(q)
#serializeTest = serializers.serialize("json", masterQuery)
queryCounter = 0
logging.info("TEST A")
total = queryStats['count']
paginationTotal = total
logging.info("TEST A END")
# print >>sys.stderr, str(masterQuery)
#-----------------------------------------------------------------------------------------------------------
# Here we need to determine whether or not the form type being queried is hierarchical.
# --If it is hierarchical, then we just organize the masterQuery and sort it with the hierarchy in mind
# --as well as with its hierarchical labels--otherwise just perform a normal sort by its label
if formtype.is_hierarchical:
global hierarchyFormList
hierarchyFormList = []
#Finally let's organize all of our reference and attribute values to match their provided order number
#We want to find all the forms that have no parent element first--these are the top of the nodes
#Then we'll organize the forms by hierarchy--which can then be put through the normal ordered query
masterQuery = masterQuery.filter(hierarchy_parent=None).exclude(form_number=None, form_name=None)[:25]
#CACHE -- this caches the query for the loop
if masterQuery:
for aForm in masterQuery:
queryCounter += 1
Qpercent = ( queryCounter * (30/(total*1.0)))
finalPercent = (60 + int(Qpercent))
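#NOTE: Qpercent spreads this loop across a 30-point band, so finalPercent climbs from 60 toward 90
#--as queryCounter approaches total; the *1.0 forces float division under Python 2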
progressData.jsonString = '{"SQL":"True","message":"Loading Queried Forms!","current_query":"'+ str(queryCounter) +'","current_term":"'+ str(total) +'","percent_done":"' + str(finalPercent) + '","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
logging.info(aForm.form_name)
hierarchyFormList.append(aForm)
#Make a recursive function to search through all children
def find_children(currentParentForm):
global hierarchyFormList
for currentChild in currentParentForm.form_set.all():
hierarchyFormList.append(currentChild)
find_children(currentChild)
find_children(aForm)
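#NOTE: find_children() appends depth-first, so each parent is immediately followed by its whole
#--subtree in hierarchyFormList; a very deep hierarchy could in principle hit Python's recursion
#--limit, though form trees are assumed shallow here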
#reset our masterQuery to our new hierarchical list!
masterQuery = hierarchyFormList
else:
#sort the formlist by their sort_index
masterQuery = masterQuery.order_by('sort_index')[:25]
#print >>sys.stderr, masterQuery
#CACHE -- This caches the query before looping through it
if masterQuery:
for aForm in masterQuery:
queryCounter += 1
Qpercent = ( queryCounter * (30/(total*1.0)))
finalPercent = (60 + int(Qpercent))
progressData.jsonString = '{"SQL":"True","message":"Loading Queried Forms!","current_query":"'+ str(queryCounter) +'","current_term":"'+ str(total) +'","percent_done":"' + str(finalPercent) + '","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
print >>sys.stderr, str(aForm.pk) + ": <!-- Current Form Pk"
rowList = []
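#NOTE (illustrative): rowList collects 4-tuples of (order_number, value_kind, display_value, value_pk),
#--where value_kind is one of 'frav', 'frrv', 'frrv-ext', or 'frrv-null'; it gets sorted by
#--order_number below so the cells line up with the queryRTYPElist headers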
#Let's loop through each item in the queryRTYPE list and match up the frav's in each queried form so the headers match the form attribute values
for rtype in queryRTYPElist:
if rtype[1] == 'frat':
print >>sys.stderr, str(rtype[2]) + ' ' + str(aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2]).count())
formRVAL = aForm.formrecordattributevalue_set.all().filter(record_attribute_type__pk=rtype[2])
#We need to check for NULL FRAV's here. When a user manually creates new forms, they don't always have FRAVS created for them if they leave it blank
if formRVAL.exists():
rowList.append((rtype[0],'frav',formRVAL[0].record_value, formRVAL[0].pk))
else:
print >>sys.stderr, "Whoops--something happened. There are no RVALS for 'frats' using: " + str(rtype[2])
#If there isn't an RVAL for this RTYPE then make a new one and return it instead
newFRAV = FormRecordAttributeValue()
newFRAV.record_attribute_type = FormRecordAttributeType.objects.get(pk=rtype[2])
newFRAV.form_parent = aForm
newFRAV.project = project
newFRAV.record_value = ""
newFRAV.save()
rowList.append((rtype[0],'frav',newFRAV.record_value, newFRAV.pk))
else:
print >>sys.stderr, aForm.ref_to_parent_form.all().count()
print >>sys.stderr, aForm.pk
for frrt in aForm.ref_to_parent_form.all():
print >>sys.stderr, "" + str(frrt.pk)
formRVAL = aForm.ref_to_parent_form.all().filter(record_reference_type__pk=rtype[2])
if formRVAL.exists():
formRVAL = formRVAL[0]
#First check to see if there are any relations stored in the many to many relationship
# --if there are, then load them normally, and if not change the value to a frrv-ext tag and store the external ID for the
# --ajax request to process properly
if formRVAL.record_reference.all().count() > 0:
#we need to store a list of its references--it's a manytomany relationship
#A comma should be sufficient to separate them, but to be safe--we'll make our delimiter a ^,^
#-- we also need to provide the formtype pk value for the link
listOfRefs = ""
for rec in formRVAL.record_reference.all():
listOfRefs += str(rec) + '|^|' + str(rec.form_type.pk) + '|^|' + str(rec.pk) + "^,^"
#remove the last delimiter
listOfRefs = listOfRefs[0:-3]
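#NOTE (illustrative, hypothetical values): with two references the serialized string looks like
#--  Unit 1|^|4|^|101^,^Unit 2|^|4|^|102
#--i.e. name|^|formtype_pk|^|form_pk triplets joined by the ^,^ delimiter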
rowList.append((rtype[0],'frrv',listOfRefs, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-ext for the AJAX callable
rowList.append((rtype[0],'frrv-ext',formRVAL.external_key_reference, formRVAL.pk))
else:
#Store the external key value instead and change it to a frrv-null for the AJAX callable
rowList.append((rtype[0],'frrv-null',"", ""))
#sort the combined reference and attribute type list by its order number
rowList = sorted(rowList, key=lambda att: att[0])
print >> sys.stderr, str(rowList)
#Now let's handle the thumbnail bit of business for the query
#--If the current form IS a media type already, then use itself to grab the thumbnail URI
if aForm.form_type.type == 1:
thumbnailURI = aForm.get_thumbnail_type()
else:
#let's find the first media type in the order but offer a default to "NO PREVIEW" if not found
thumbnailURI = staticfiles_storage.url("/static/site-images/no-thumb-missing.png")
for record in rowList:
#if it's a reference
if record[1] == 'frrv' or record[1] == 'frrv-ext':
currentRTYPE = FormRecordReferenceValue.objects.get(pk=int(record[3]))
#if it's not a NoneType reference:
if currentRTYPE.record_reference_type.form_type_reference is not None:
#If its a reference to a media type
if currentRTYPE.record_reference_type.form_type_reference.type == 1:
print >> sys.stderr, "WE GOT A MATCH"
#Because a form record reference value is a ManyToMany relationship, we just grab the first one in the list
#TODO this may need to be edited later--because you can't order the selections. I may add another ForeignKey called
#"Thumbnail Reference" which links to a single relation to a form of a media type--this would also
#probably solve the complexity of looping through to grab it as it stands right now
#****WE also have to check for NULL references
if currentRTYPE.record_reference.all().count() > 0:
thumbnailURI = currentRTYPE.record_reference.all()[0].get_thumbnail_type()
break
#we only want the first 5 values from the final ordered list of attributes
rowList = rowList[0:5]
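#NOTE: this cap of 5 pairs with the len(queryRTYPElist) < 5 padding check above--both aim the
#--results table at five display columns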
formList.append([thumbnailURI,str(aForm.pk), aForm, rowList])
form_att_type_list, form_list = form_att_type_list, formList
#update our progress bar
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"90","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
finishedJSONquery = {}
headerList=[]
for rtype in queryRTYPElist:
rtypeDict = {}
rtypeDict["index"] = rtype[0]
rtypeDict["rtype"] = rtype[1]
rtypeDict["pk"] = rtype[2]
rtypeDict["name"] = rtype[3]
headerList.append(rtypeDict)
#update our progress bar
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"93","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
finishedJSONquery["rtype_header"] = headerList
allFormList = []
counter = 0
total = len(formList)
for form in formList:
#update our progress bar
counter += 1
currentPercent = 93 + int((counter*(5.0/total)))
progressData.jsonString = '{"message":"Packaging Query for User","current_query":"","current_term":"","percent_done":"'+str(currentPercent)+'","is_complete":"False","stats":'+jsonStats+'}'
progressData.save()
formDict = {}
formDict["thumbnail_URI"] = form[0]
formDict["pk"] = form[1]
if formtype.is_hierarchical: formDict["form_id"] = form[2].get_hierarchy_label()
else: formDict["form_id"] = form[2].form_name
formRVALS = []
for rval in form[3]:
rvalDict = {}
rvalDict["index"] = rval[0]
rvalDict["rtype"] = rval[1]
rvalDict["value"] = rval[2]
rvalDict["pk"] = rval[3]
formRVALS.append(rvalDict)
formDict["rvals"] = formRVALS
allFormList.append(formDict)
finishedJSONquery["form_list"] = allFormList
finishedJSONquery["formtype"] = formtype.form_type_name
finishedJSONquery["formtype_pk"] = formtype.pk
finishedJSONquery["project_pk"] = request.user.permissions.project.pk
finishedJSONquery["project"] = request.user.permissions.project.name
finishedJSONquery["currentQuery"] = request.POST['query']
finishedJSONquery["totalResultCount"] = paginationTotal
#convert to JSON
finishedJSONquery = json.dumps(finishedJSONquery)
#Update our progress bar
progressData.jsonString = '{"message":"Finished!","current_query":"","current_term":"","percent_done":"100","is_complete":"True","stats":'+jsonStats+'}'
progressData.save()
print >>sys.stderr, "Timer End"
return HttpResponse(finishedJSONquery, content_type="application/json")
ERROR_MESSAGE += "Error: You don't have permission to access this FormType from another project"
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access querying this project"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_FORM_RTYPES() *Recycling
#=======================================================#
def get_form_rtypes(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
#:::This endpoint returns a JSON list of all rtype values (their values and pk's) associated with a given form. We are only accessing data
# --so the access level is 1. Any user should be able to use this endpoint.
#
# Returned JSON Example: {"rtype_list":[
# {"rtype_pk": "1",
# "rtype_label": "Object Shape",
# "rtype": "FRAT",
# "rval": {"Spherical":"<pk#>"}, <-- This will similarly be a json object with 1 key/val pair
#
# },
# {"rtype_pk": "6",
# "rtype_label": "Associated Unit",
# "rtype": "FRRT",
# "rval": {"Unit 1":"<pk#>", "Unit 2":"<pk#>"}, <-- if this is a frrt, then this will be another json object of key/val pairs
# "ext_key": "1,2" <-- This is just the raw ext key string
# "thumbnail":"www.geioh.coms/hoidjjds.jpg"
# },
# ]}
#
# EXPECTED POST VARIABLES:
# -- 'form_pk'
#------------------------------------------------------------------------------------------------------------------------------------
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#$$$ SECURITY $$$ Make sure we only take POST requests
if request.method == 'POST':
currentForm = Form.objects.get(pk=request.POST['form_pk'])
#$$$ SECURITY $$$ Make sure form is in the same project space as the user or refuse the request for the list
if currentForm.project.pk == request.user.permissions.project.pk:
jsonData = {}
rtype_list = []
jsonData['rtype_list'] = rtype_list
#Alright--let's load our RTYPEs from the current Form requested
#*** RECYCLING BIN *** Let's filter them out by their recycling flags as well
frav_list = currentForm.formrecordattributevalue_set.all().filter(flagged_for_deletion=False)
frrv_list = currentForm.ref_to_parent_form.all().filter(flagged_for_deletion=False)
#If Statement forces evaluation of the query set before the loop
if frav_list:
#Let's load all the FRATs and FRAVs first
for FRAV in frav_list:
currentRTYPE = {}
currentRVAL = {}
currentRTYPE['rtype_pk'] = FRAV.record_attribute_type.pk
currentRTYPE['rtype_label'] = FRAV.record_attribute_type.record_type
currentRTYPE['rtype'] = "FRAT"
currentRVAL[FRAV.pk] = FRAV.record_value
currentRTYPE['rval'] = currentRVAL
rtype_list.append(currentRTYPE)
#If Statement forces evaluation of the query set before the loop
if frrv_list:
print >>sys.stderr, frrv_list
for FRRV in frrv_list:
currentRTYPE = {}
rvalList = []
print >>sys.stderr, FRRV.pk
currentRTYPE['rtype_pk'] = FRRV.record_reference_type.pk
currentRTYPE['rtype_label'] = FRRV.record_reference_type.record_type
currentRTYPE['rtype'] = "FRRT"
#sometimes if not initialized, there won't be a FRRT reference--it will be a "NoneType" or "Null"
#--if that's the case, there will be no PK value, so we will set the ref_formtype to "None" in that case
if FRRV.record_reference_type.form_type_reference is not None: currentRTYPE['ref_formtype'] = FRRV.record_reference_type.form_type_reference.pk
else: currentRTYPE['ref_formtype'] = "None"
currentRTYPE['ext_key'] = FRRV.external_key_reference
currentRTYPE['rval_pk'] = FRRV.pk
for FRRV_REF in FRRV.record_reference.all():
currentRVAL = {}
currentRVAL['pk'] = FRRV_REF.pk
currentRVAL['name'] = FRRV_REF.form_name
currentRVAL['thumbnail'] = FRRV_REF.get_ref_thumbnail()
currentRVAL['url'] = reverse('maqlu_admin:edit_form',kwargs={'project_pk': request.user.permissions.project.pk, 'form_type_pk':FRRV_REF.form_type.pk, 'form_pk': FRRV_REF.pk})
rvalList.append(currentRVAL)
currentRTYPE['rval'] = rvalList
rtype_list.append(currentRTYPE)
#If there are no FRRVs then just attach a list of the FRRT's instead with no rval data
else:
frrt_list = currentForm.form_type.ref_to_parent_formtype.all()
print >>sys.stderr, frrt_list
if frrt_list:
for FRRT in frrt_list:
print >>sys.stderr, FRRT.form_type_reference
currentRTYPE = {}
currentRTYPE['rtype_pk'] = FRRT.pk
currentRTYPE['rtype_label'] = FRRT.record_type
currentRTYPE['rtype'] = "FRRT"
currentRTYPE['ref_formtype'] = FRRT.form_type_reference.pk
currentRTYPE['ext_key'] = ""
currentRTYPE['rval_pk'] = ""
currentRTYPE['rval'] = ""
rtype_list.append(currentRTYPE)
#convert python dict to a json string and send it back as a response
jsonData = json.dumps(jsonData)
return HttpResponse(jsonData, content_type="application/json")
ERROR_MESSAGE += "Error: You do not have permission to accesss this project."
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
#=======================================================#
# ACCESS LEVEL : 1 GET_FORMTYPE_FORM_LIST() *Recycling
#=======================================================#
def get_formtype_form_list(self, request):
#***************#
ACCESS_LEVEL = 1
#***************#
#------------------------------------------------------------------------------------------------------------------------------------
#:::This endpoint returns a JSON list of all form names and pk values attached to a specific formtype. It's used mainly
# --to help drop-down menu widgets function, but may be used by other features as well.
#
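# Returned JSON Example (illustrative, hypothetical values):
# {"form_list":[
#      {"form_label": "Form A", "form_pk": 12},
#      {"form_label": "Form B", "form_pk": 13}
# ]}
#
# EXPECTED POST VARIABLES:
# -- 'formtype_pk'
#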
#------------------------------------------------------------------------------------------------------------------------------------
ERROR_MESSAGE = ""
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
#$$$ SECURITY $$$ Make sure we only take POST requests
if request.method == 'POST':
print >>sys.stderr, request.POST
currentFormType = FormType.objects.get(pk=request.POST['formtype_pk'])
#$$$ SECURITY $$$ Make sure form is in the same project space as the user or refuse the request for the list
if currentFormType.project.pk == request.user.permissions.project.pk:
jsonData = {}
form_list = []
jsonData['form_list'] = form_list
#*** RECYCLING BIN *** Make sure Forms are filtered by their deletion flags
for aForm in currentFormType.form_set.all().filter(flagged_for_deletion=False):
currentForm = {}
currentForm['form_label'] = aForm.form_name
currentForm['form_pk'] = aForm.pk
form_list.append(currentForm)
#convert python dict to a json string and send it back as a response
jsonData = json.dumps(jsonData)
return HttpResponse(jsonData, content_type="application/json")
ERROR_MESSAGE += "Error: You do not have permission to accesss this project."
ERROR_MESSAGE += "Error: You have not submitted through POST"
else: ERROR_MESSAGE += "Error: You do not have permission to access modifying user information"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
return HttpResponse('{"ERROR":"'+ ERROR_MESSAGE +'"}',content_type="application/json")
##==========================================================================================================================
# ADMIN DJANGO VIEWS ****************************************************************************************************
##==========================================================================================================================
#=====================================================================================#
# ACCESS LEVEL : 1 TEMPLATE_ACCESS_LEVEL : 3 VIEW_FORM_TYPE() *RECYCLING
#=====================================================================================#
def view_form_type(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 1
TEMPLATE_ACCESS_LEVEL = 3
#************************#
#-----------------------------------------------------------------------------------
# This view displays the view form type template--or essentially the query engine
# --attached to it. It allows the user to look at forms and their details in bulk
# --according to their form type, and also allows those forms to be queried by
# --the query engine where graphs/charts etc. are produced. All this view needs to
# --do is pass a few variables to the template, and display the template. The AJAX
# --and template will handle all permissions etc. from there
ERROR_MESSAGE = ""
#Set up the variables we'll pass to the template if allowed
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
formtype = FormType.objects.get(pk=kwargs['form_type_pk'])
except:
raise Http404("This Page Does Not Exist!")
#*** RECYCLING BIN *** Return a 404 Error if the requested model instance is flagged for deletion (in the recycling bin)
if formtype.flagged_for_deletion: raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk and formtype.project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'counter':counter})
kwargs.update({'project':project})
kwargs.update({'formtype':formtype})
kwargs.update({'form':'False'})
kwargs.update({'toolbar_title_code': 'FormType_' + kwargs['form_type_pk']})
kwargs.update({'deletable': 'False'})
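#NOTE: kwargs doubles as the template context--render_to_response() below hands it straight to
#--the template, which is why the access flags and model instances are packed into it here (the
#--same convention repeats across the other admin views in this file)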
print >>sys.stderr, request
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/view_form_type.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 1 TEMPLATE_ACCESS_LEVEL : 3 QUERY_FORM_TYPE() *RECYCLING
#=====================================================================================#
def query_form_type(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 1
TEMPLATE_ACCESS_LEVEL = 3
#************************#
#-----------------------------------------------------------------------------------
# This view displays the query form type template--or essentially the query engine
# --attached to it. It allows the user to look at forms and their details in bulk
# --according to their form type, and also allows those forms to be queried by
# --the query engine where graphs/charts etc. are produced. All this view needs to
# --do is pass a few variables to the template, and display the template. The AJAX
# --and template will handle all permissions etc. from there
ERROR_MESSAGE = ""
#Set up the variables we'll pass to the template if allowed
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
formtype = FormType.objects.get(pk=kwargs['form_type_pk'])
except:
raise Http404("This Page Does Not Exist!")
#*** RECYCLING BIN *** Return a 404 Error if the requested model instance is flagged for deletion (in the recycling bin)
if formtype.flagged_for_deletion: raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk and formtype.project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'counter':counter})
kwargs.update({'project':project})
kwargs.update({'formtype':formtype})
kwargs.update({'form':'False'})
kwargs.update({'toolbar_title_code': 'FormType_' + kwargs['form_type_pk']})
kwargs.update({'deletable': 'False'})
print >>sys.stderr, request
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/query_form_type.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 1 TEMPLATE_ACCESS_LEVEL : 3 MASTER_QUERY_ENGINE() *RECYCLING
#=====================================================================================#
def master_query_engine(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 1
TEMPLATE_ACCESS_LEVEL = 3
#************************#
#-----------------------------------------------------------------------------------
# This view displays the master query engine template--the query engine that spans the whole
# --project rather than a single form type. It allows the user to look at forms and their
# --details in bulk and query them, with graphs/charts etc. produced by the query engine.
# --All this view needs to do is pass a few variables to the template, and display the
# --template. The AJAX and template will handle all permissions etc. from there
ERROR_MESSAGE = ""
#Set up the variables we'll pass to the template if allowed
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except:
raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'counter':counter})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'toolbar_title_code': 'Project_' + kwargs['project_pk']})
kwargs.update({'deletable': 'False'})
print >>sys.stderr, request
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/master_query_engine.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 4 TEMPLATE_ACCESS_LEVEL : 4 GEOJSON_IMPORTER()
#=====================================================================================#
def geojson_importer(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 4
TEMPLATE_ACCESS_LEVEL = 4
#************************#
#-----------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------
# This view displays the base template for the GeoJSON importer to create new form types
# --and populate them with forms based on features in the GeoJSON
# --Most of the logic is done in an API Endpoint, but the base template provides the necessary
# --tools in Jscript to perform all of this.
# The Importer works client-side to process the GeoJSON file and when the user finishes the form,
# --it will upload the processed GeoJSON data to the server and run the actual database import
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except:
raise Http404("Project Does Not Exist!")
if project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'counter':counter})
kwargs.update({'toolbar_title_code': 'CSVImporter_none'})
kwargs.update({'deletable': 'False'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/geojson_importer.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 4 TEMPLATE_ACCESS_LEVEL : 4 FORM_TYPE_IMPORTER()
#=====================================================================================#
def form_type_importer(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 4
TEMPLATE_ACCESS_LEVEL = 4
#************************#
#-----------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------
# This view displays the base template for the CSV importer to create new form types
# --and populate them with forms based on rows in the CSV
# --Most of the logic is done in an API Endpoint, but the base template provides the necessary
# --tools in Jscript to perform all of this.
# The Importer works client-side to process the CSV file in JSON and when the user finishes the form,
# --it will upload the processed CSV data to the server and run the actual database import
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except:
raise Http404("Project Does Not Exist!")
if project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'counter':counter})
kwargs.update({'toolbar_title_code': 'CSVImporter_none'})
kwargs.update({'deletable': 'False'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/new_formtype_importer.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 4 TEMPLATE_ACCESS_LEVEL : 4 FORMTYPE_FORM_IMPORTER()
#=====================================================================================#
def formtype_form_importer(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 4
TEMPLATE_ACCESS_LEVEL = 4
#************************#
#-----------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------
# This view displays the base template for the CSV importer to create new form types
# --and populate them with forms based on rows in the CSV
# --Most of the logic is done in an API Endpoint, but the base template provides the necessary
# --tools in Jscript to perform all of this.
# The Importer works client-side to process the CSV file in JSON and when the user finishes the form,
# --it will upload the processed CSV data to the server and run the actual database import
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
formtype = FormType.objects.get(pk=kwargs['form_type_pk'])
except:
raise Http404("Project Does Not Exist!")
if project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'formtype':formtype})
kwargs.update({'toolbar_title_code': 'CSVImporter_none'})
kwargs.update({'deletable': 'False'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/formtype_form_importer.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 4 TEMPLATE_ACCESS_LEVEL : 4 RTYPE_IMPORTER()
#=====================================================================================#
def rtype_importer(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 4
TEMPLATE_ACCESS_LEVEL = 4
#************************#
#-----------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------
# This view displays the base template for the RTYPE importer, which creates new record types
# --for an existing form type (as with the other importers, the heavy lifting happens in the API endpoints)
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
formtype = FormType.objects.get(pk=kwargs['form_type_pk'])
except:
raise Http404("Project Does Not Exist!")
if project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'formtype':formtype})
kwargs.update({'toolbar_title_code': 'FormType_' + kwargs['form_type_pk']})
kwargs.update({'deletable': 'False'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/new_rtype_importer.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 1 TEMPLATE_ACCESS_LEVEL : 5 PROJECT_HOME()
#=====================================================================================#
def project_home(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 1
TEMPLATE_ACCESS_LEVEL = 5
#************************#
#-----------------------------------------------------------------------------------
# This view delivers the project overview of users/stats etc. Only a level 5 admin can edit
# --the info on this screen. Although the template access level is set to 5 on this view, we allow all
# --project users to see this page. Access to modifications is prohibited in the template
# --using the access_level passed through **kwargs, e.g. save and delete buttons
# --will not be generated if someone isn't level 5
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except:
raise Http404("Project Does Not Exist!")
if request.user.permissions.project.pk == project.pk:
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'project':project})
kwargs.update({'toolbar_title_code': 'Project_' + kwargs['project_pk']})
kwargs.update({'form':'False'})
kwargs.update({'deletable': 'False'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/project_control_panel.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 3 TEMPLATE_ACCESS_LEVEL : 3 EDIT_FORM_TYPE() *RECYCLING
#=====================================================================================#
def edit_form_type(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 3
TEMPLATE_ACCESS_LEVEL = 3
#************************#
#------------------------------------------------------------------------------------------------------
# This view just displays the form type editor page. Only a level 3 access can see and use this page
# --It's not necessary for any lower access to view this page
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
formtype = FormType.objects.get(pk=kwargs['form_type_pk'])
except:
raise Http404("Project Does Not Exist!")
#*** RECYCLING BIN *** Return a 404 Error if the requested model instance is flagged for deletion (in the recycling bin)
if formtype.flagged_for_deletion: raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk and formtype.project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'counter':counter})
kwargs.update({'project':project})
kwargs.update({'formtype':formtype})
kwargs.update({'form':'False'})
kwargs.update({'toolbar_title_code': 'FormType_' + kwargs['form_type_pk']})
kwargs.update({'deletable': 'True'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/edit_form_type.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 4 TEMPLATE_ACCESS_LEVEL : 4 NEW_FORM_TYPE()
#=====================================================================================#
def new_form_type(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 4
TEMPLATE_ACCESS_LEVEL = 4
#************************#
#-----------------------------------------------------------------------------------------------
# This view shows the new form type creator template. It allows users to create new form types
# --for their project. Because it is creating a new form type it is limited only to those with
# --level 4 access.
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except:
raise Http404("Project Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk:
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'toolbar_title_code': 'NewFormType_none'})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'deletable': 'False'})
else:
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/new_form_type.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 1 TEMPLATE_ACCESS_LEVEL : 2 EDIT_FORM() *RECYCLING
#=====================================================================================#
def edit_form(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 1
TEMPLATE_ACCESS_LEVEL = 2
#************************#
#-----------------------------------------------------------------------------------------------
# This view shows the page to edit an existing form. Any project user can view this, but only level 2
# --and above can use its functionality to submit data
ERROR_MESSAGE = ""
try:
form = Form.objects.get(pk=kwargs['form_pk'])
form_type = FormType.objects.get(pk=kwargs['form_type_pk'])
project = FormProject.objects.get(pk=kwargs['project_pk'])
except:
raise Http404("Form does not exist")
#*** RECYCLING BIN *** Return a 404 Error if the requested model instance is flagged for deletion (in the recycling bin)
if form.flagged_for_deletion: raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk and form.project.pk == request.user.permissions.project.pk and form.form_type.pk == form_type.pk:
counter = Counter()
counter.reset()
#Temp measure to make displaying the page much faster--remove the database query hits from the actual .html file
#We need a list of this form's rtypes and a list of this form's rtype values
print >>sys.stderr, "TIMER FOR FORM EDIT A"
#*** RECYCLING BIN *** We need to make sure all the RTYPES and RVALS(by their RTYPE) are filtered out by their deletion flags
frat_list = form_type.formrecordattributetype_set.all().filter(flagged_for_deletion=False)
frav_list = form.formrecordattributevalue_set.all().filter(record_attribute_type__flagged_for_deletion=False)
frrt_list = form_type.ref_to_parent_formtype.all().filter(flagged_for_deletion=False)
frrv_list = form.ref_to_parent_form.all().filter(record_reference_type__flagged_for_deletion=False)
kwargs.update({'api_urls':get_api_endpoints()})
kwargs.update({'frat_list':frat_list})
kwargs.update({'frav_list':frav_list})
kwargs.update({'frrt_list':frrt_list})
kwargs.update({'frrv_list':frrv_list})
print >>sys.stderr, "TIMER FOR FORM EDIT A"
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'formtype':form_type})
kwargs.update({'form':form})
kwargs.update({'project':project})
kwargs.update({'counter':counter})
kwargs.update({'toolbar_title_code': 'Form_' + kwargs['form_pk']})
kwargs.update({'deletable': 'True'})
else:
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/edit_form.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, return an error in the json HTTP Response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 2 TEMPLATE_ACCESS_LEVEL : 2 NEW_FORM() *RECYCLING
#=====================================================================================#
def new_form(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 2
TEMPLATE_ACCESS_LEVEL = 2
#************************#
#-----------------------------------------------------------------------------------------------
# This view shows the page to create a new form. Any project user can view this, but only level 2
# --and above can use its functionality to submit data
ERROR_MESSAGE = ""
try:
form_type = FormType.objects.get(pk=kwargs['form_type_pk'])
project = FormProject.objects.get(pk=kwargs['project_pk'])
except (FormType.DoesNotExist, FormProject.DoesNotExist):
raise Http404("Form Type does not exist")
#*** RECYCLING BIN *** Return a 404 error if the requested model instance is flagged for deletion (in the recycling bin)
# --we still need to check the formtype to prevent URL hacking
if form_type.flagged_for_deletion: raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk and form_type.project.pk == request.user.permissions.project.pk:
kwargs.update({'api_urls':get_api_endpoints()})
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'form':'False'})
kwargs.update({'formtype':form_type})
kwargs.update({'project':project})
kwargs.update({'toolbar_title_code': 'NewForm_' + kwargs['form_type_pk']})
kwargs.update({'deletable': 'False'})
else:
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/new_form.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, render the admin error page in the HTTP response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 2 TEMPLATE_ACCESS_LEVEL : 2 EDIT_FORM_TYPE_TEMPLATE() *RECYCLING
#=====================================================================================#
def edit_form_type_template(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 2
TEMPLATE_ACCESS_LEVEL = 2
#************************#
#-----------------------------------------------------------------------------------------------
# This is a test view for templating views -- TODO
ERROR_MESSAGE = ""
try:
form_type = FormType.objects.get(pk=kwargs['form_type_pk'])
project = FormProject.objects.get(pk=kwargs['project_pk'])
except (FormType.DoesNotExist, FormProject.DoesNotExist):
raise Http404("Form Type does not exist")
#*** RECYCLING BIN *** Return a 404 error if the requested model instance is flagged for deletion (in the recycling bin)
if form_type.flagged_for_deletion: raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk and form_type.project.pk == request.user.permissions.project.pk:
kwargs.update({'api_urls':get_api_endpoints()})
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'form':'False'})
kwargs.update({'formtype':form_type})
kwargs.update({'project':project})
kwargs.update({'toolbar_title_code': 'NewForm_' + kwargs['form_type_pk']})
kwargs.update({'deletable': 'False'})
else:
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/edit_formtype_template.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, render the admin error page in the HTTP response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 5 TEMPLATE_ACCESS_LEVEL : 5 RECYCLING_BIN()
#=====================================================================================#
def recycling_bin(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 5
TEMPLATE_ACCESS_LEVEL = 5
#************************#
#-----------------------------------------------------------------------------------------------
# This redirects the Admin to the recycling bin page
ERROR_MESSAGE = ""
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except FormProject.DoesNotExist:
raise Http404("Project does not exist")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk:
kwargs.update({'api_urls':get_api_endpoints()})
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'project':project})
kwargs.update({'toolbar_title_code': 'Project_' + kwargs['project_pk']})
kwargs.update({'form':'False'})
kwargs.update({'deletable': 'False'})
else:
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/recycling_bin.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, render the admin error page in the HTTP response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
#=====================================================================================#
# ACCESS LEVEL : 1 TEMPLATE_ACCESS_LEVEL : 3 GEOSPATIAL_ENGINE()
#=====================================================================================#
def geospatial_engine(self, request, **kwargs):
#************************#
ACCESS_LEVEL = 1
TEMPLATE_ACCESS_LEVEL = 3
#************************#
#-----------------------------------------------------------------------------------
# This view displays the view form type template--or essentially the query engine
# --attached to it. It allows the user to look at forms and their details in bulk
# --according to their form type, and also allows those forms to be queried by
# --the query engine where graphs/charts etc. are produced. All this view needs to
# --do is pass a few variables to the template, and display the template. The AJAX
# --and template will handle all permissions etc. from there
ERROR_MESSAGE = ""
#Set up the variables we'll pass to the template if allowed
try:
project = FormProject.objects.get(pk=kwargs['project_pk'])
except FormProject.DoesNotExist:
raise Http404("This Page Does Not Exist!")
#Make sure the user is trying to access their project and not another project
#If they are trying to access another project--warn them their action has been logged
#after redirecting them to a warning page
if project.pk == request.user.permissions.project.pk:
counter = Counter()
counter.reset()
kwargs.update({'api_urls':get_api_endpoints()})
kwargs.update({'access_level':TEMPLATE_ACCESS_LEVEL})
kwargs.update({'user_access':request.user.permissions.access_level})
kwargs.update({'user_project':request.user.permissions.project})
kwargs.update({'counter':counter})
kwargs.update({'project':project})
kwargs.update({'form':'False'})
kwargs.update({'toolbar_title_code': 'Project_' + kwargs['project_pk']})
kwargs.update({'deletable': 'False'})
print >>sys.stderr, request
else:
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), 'Trying to access another project.', request.META)
return HttpResponse(render_to_response('maqluengine/admin_warning.html', kwargs, RequestContext(request)))
#Check our user's session and access level
if SECURITY_check_user_permissions(ACCESS_LEVEL, request.user.permissions.access_level):
return HttpResponse(render_to_response('maqluengine/geospatial_engine.html', kwargs, RequestContext(request)))
else: ERROR_MESSAGE += "Error: You do not have permission to view this page"
#If anything goes wrong in the process, render the admin error page in the HTTP response
SECURITY_log_security_issues(request.user, 'admin.py - ' + str(sys._getframe().f_code.co_name), ERROR_MESSAGE, request.META)
kwargs.update({'ERROR_MESSAGE': ERROR_MESSAGE})
return HttpResponse(render_to_response('maqluengine/admin_error.html', kwargs, RequestContext(request)))
##==========================================================================================================================
# OVERRIDDEN ADMIN DJANGO VIEWS *****************************************************************************************
##==========================================================================================================================
def index(self, request, **kwargs):
#This function is important for security reasons: it overrides all normal admin index operations
#--and redirects any logged-in user to their respective project's control panel
# --The remainder of the built-in admin views will need to be overridden--much like this index--
# --to make sure that only the custom admin can be used. The reason is that all users have to be "is_staff",
# --giving them access to change things. If they find a way into the Django built-in admin, they will be able to affect
# --the database in potentially nefarious ways
print >>sys.stderr, reverse('maqlu_admin:project_home',kwargs={'project_pk': request.user.permissions.project.pk})
return redirect('maqlu_admin:project_home',project_pk=request.user.permissions.project.pk)
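#Any other built-in AdminSite view can be fenced off the same way. A sketch
#(not in the original file; app_index is the stock AdminSite hook with this
#signature in this Django generation):
#
#    def app_index(self, request, app_label, extra_context=None):
#        #Never expose the stock per-app index--bounce users to their project
#        return redirect('maqlu_admin:project_home', project_pk=request.user.permissions.project.pk)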
##==========================================================================================================================
# CUSTOM ADMIN URL PATTERNS *********************************************************************************************
##==========================================================================================================================
def get_urls(self):
#============================================================
# HELP WITH URL PATTERNS
# --I've found this to be an incredibly frustrating process, but finally discovered the secrets to reversing
# --urls by 'name' in these patterns below. Thank God! There are 2 ways to handle this--through a Redirect() in a view
# --or through a Reverse()--both link to the same regex expression in the url patterns, but take in args/kwargs differently.
# --This difference in arguments is what kept me frustrated for several hours--this small issue is not well documented.
# --Here are two examples to show the difference:
# :redirect('maqlu_admin:project_home',project_pk=request.user.permissions.project.pk)
# :reverse('maqlu_admin:project_home',kwargs={'project_pk': request.user.permissions.project.pk})
# --Also notice that these custom AdminSite views have their own namespace attached to the custom AdminSite
# --in this case, it is named "maqlu_admin" in the "MyAdminSite" Class above. Views can be referenced as 'maqlu_admin:<view_name>'
#
# --FOR TEMPLATES: use this method {% url 'maqlu_admin:view-name' arg1=v1 arg2=v2 %}
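# --For example, a template link into the form editor defined below would be
# --(illustrative only; 'edit_form' and its kwargs come from my_urls):
# --{% url 'maqlu_admin:edit_form' project_pk=project.pk form_type_pk=formtype.pk form_pk=form.pk %}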
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
my_urls = patterns('',
#Base Admin Site
url(r'^$', admin.site.admin_view(self.index), name='index'),
#All Admin API Endpoints
url(r'^get_user_list/$', admin.site.admin_view(self.get_user_list), name='get_user_list'),
url(r'^run_query_engine/$', admin.site.admin_view(self.run_query_engine), name='run_query_engine'),
url(r'^run_master_query_engine/$', admin.site.admin_view(self.run_master_query_engine), name='run_master_query_engine'),
url(r'^save_project_changes/$', admin.site.admin_view(self.save_project_changes), name='save_project_changes'),
url(r'^save_form_type_changes/$', admin.site.admin_view(self.save_form_type_changes), name='save_form_type_changes'),
url(r'^save_form_changes/$', admin.site.admin_view(self.save_form_changes), name='save_form_changes'),
url(r'^create_new_form/$', admin.site.admin_view(self.create_new_form), name='create_new_form'),
url(r'^create_new_form_type/$', admin.site.admin_view(self.create_new_form_type), name='create_new_form_type'),
url(r'^create_new_form_type_template/$', admin.site.admin_view(self.create_new_form_type_template), name='create_new_form_type_template'),
url(r'^run_form_type_importer/$', admin.site.admin_view(self.run_form_type_importer), name='run_form_type_importer'),
url(r'^run_form_type_form_importer/$', admin.site.admin_view(self.run_form_type_form_importer), name='run_form_type_form_importer'),
url(r'^run_new_rtype_importer/$', admin.site.admin_view(self.run_new_rtype_importer), name='run_new_rtype_importer'),
url(r'^run_geojson_importer/$', admin.site.admin_view(self.run_geojson_importer), name='run_geojson_importer'),
url(r'^get_previous_next_forms/$', admin.site.admin_view(self.get_previous_next_forms), name='get_previous_next_forms'),
url(r'^username_taken/$', admin.site.admin_view(self.username_taken), name='username_taken'),
url(r'^debug_tool/$', admin.site.admin_view(self.debug_tool), name='debug_tool'),
url(r'^debug_toolA/$', admin.site.admin_view(self.debug_toolA), name='debug_toolA'),
url(r'^delete_form_type/$', admin.site.admin_view(self.delete_form_type), name='delete_form_type'),
url(r'^delete_form/$', admin.site.admin_view(self.delete_form), name='delete_form'),
url(r'^delete_frat/$', admin.site.admin_view(self.delete_frat), name='delete_frat'),
url(r'^delete_frrt/$', admin.site.admin_view(self.delete_frrt), name='delete_frrt'),
url(r'^delete_form_type_group/$', admin.site.admin_view(self.delete_form_type_group), name='delete_form_type_group'),
url(r'^restore_form_type/$', admin.site.admin_view(self.restore_form_type), name='restore_form_type'),
url(r'^restore_form/$', admin.site.admin_view(self.restore_form), name='restore_form'),
url(r'^restore_frat/$', admin.site.admin_view(self.restore_frat), name='restore_frat'),
url(r'^restore_frrt/$', admin.site.admin_view(self.restore_frrt), name='restore_frrt'),
url(r'^recycle_form_type/$', admin.site.admin_view(self.recycle_form_type), name='recycle_form_type'),
url(r'^recycle_form/$', admin.site.admin_view(self.recycle_form), name='recycle_form'),
url(r'^recycle_frat/$', admin.site.admin_view(self.recycle_frat), name='recycle_frat'),
url(r'^recycle_frrt/$', admin.site.admin_view(self.recycle_frrt), name='recycle_frrt'),
url(r'^load_recycling_bin/$', admin.site.admin_view(self.load_recycling_bin), name='load_recycling_bin'),
#url(r'^csvexport/$', admin.site.admin_view(self.test_csv_export), name='test_csv_export'),
url(r'^modify_project_user/$', admin.site.admin_view(self.modify_project_user), name='modify_project_user'),
url(r'^get_form_search_list/$', admin.site.admin_view(self.get_form_search_list), name='get_form_search_list'),
url(r'^bulk_edit_formtype/$', admin.site.admin_view(self.bulk_edit_formtype), name='bulk_edit_formtype'),
url(r'^get_rtype_list/$', admin.site.admin_view(self.get_rtype_list), name='get_rtype_list'),
url(r'^get_formtype_form_list/$', admin.site.admin_view(self.get_formtype_form_list), name='get_formtype_form_list'),
url(r'^get_form_rtypes/$', admin.site.admin_view(self.get_form_rtypes), name='get_form_rtypes'),
url(r'^get_formtype_geospatial_layers/$', admin.site.admin_view(self.get_formtype_geospatial_layers), name='get_formtype_geospatial_layers'),
url(r'^navigate_query_pagination/$', admin.site.admin_view(self.navigate_query_pagination), name='navigate_query_pagination'),
url(r'^navigate_master_query_pagination/$', admin.site.admin_view(self.navigate_master_query_pagination), name='navigate_master_query_pagination'),
url(r'^check_progress/$', admin.site.admin_view(self.check_progress), name='check_progress'),
url(r'^check_progress_query/$', admin.site.admin_view(self.check_progress_query), name='check_progress_query'),
url(r'^export_formtype/$', admin.site.admin_view(self.export_formtype), name='export_formtype'),
url(r'^export_project/$', admin.site.admin_view(self.export_project), name='export_project'),
url(r'^get_projects/$', admin.site.admin_view(self.get_projects), name='get_projects'),
url(r'^get_formtypes/$', admin.site.admin_view(self.get_formtypes), name='get_formtypes'),
url(r'^get_rtypes/$', admin.site.admin_view(self.get_rtypes), name='get_rtypes'),
url(r'^save_user_query/$', admin.site.admin_view(self.save_user_query), name='save_user_query'),
#All Admin Template Views
url(r'^project/(?P<project_pk>[0-9]+)/$', self.admin_view(self.project_home), name='project_home'),
url(r'^project/(?P<project_pk>[0-9]+)/recycling_bin/$', self.admin_view(self.recycling_bin), name='recycling_bin'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype_importer/$', admin.site.admin_view(self.form_type_importer), name='formtype_importer'),
url(r'^project/(?P<project_pk>[0-9]+)/geojson_importer/$', admin.site.admin_view(self.geojson_importer), name='geojson_importer'),
url(r'^project/(?P<project_pk>[0-9]+)/geospatial_engine/$', admin.site.admin_view(self.geospatial_engine), name='geospatial_engine'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype/(?P<form_type_pk>[0-9]+)/rtype_importer/$', admin.site.admin_view(self.rtype_importer), name='rtype_importer'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype/(?P<form_type_pk>[0-9]+)/form_importer/$', admin.site.admin_view(self.formtype_form_importer), name='formtype_form_importer'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype_editor/(?P<form_type_pk>[0-9]+)/$', admin.site.admin_view(self.edit_form_type), name='edit_form_type'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype/(?P<form_type_pk>[0-9]+)/$', admin.site.admin_view(self.view_form_type), name='view_form_type'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype_query_engine/(?P<form_type_pk>[0-9]+)/$', admin.site.admin_view(self.query_form_type), name='query_form_type'),
url(r'^project/(?P<project_pk>[0-9]+)/master_query_engine/$', admin.site.admin_view(self.master_query_engine), name='master_query_engine'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype_generator/$', admin.site.admin_view(self.new_form_type), name='new_form_type'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype/(?P<form_type_pk>[0-9]+)/formtype_template_generator/$', admin.site.admin_view(self.edit_form_type_template), name='edit_form_type_template'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype/(?P<form_type_pk>[0-9]+)/form_generator/$', admin.site.admin_view(self.new_form), name='new_form'),
url(r'^project/(?P<project_pk>[0-9]+)/formtype/(?P<form_type_pk>[0-9]+)/form_editor/(?P<form_pk>[0-9]+)/$', admin.site.admin_view(self.edit_form), name='edit_form')
)
for aURL in urls:
print >>sys.stderr, aURL
return my_urls + urls
##==========================================================================================================================
# EXPERIMENTAL ENDPOINTS *************************************************************************************************
##==========================================================================================================================
def debug_toolA(self, request):
allUsers = User.objects.all()
for aUser in allUsers:
if aUser.first_name == "Robert":
aUser.permissions.user_project_title = "Or is it working Again?"
#Save the related permissions object--saving the user alone would not persist this field
aUser.permissions.save()
return HttpResponse("debug_toolA finished")
def debug_tool(self, request, **kwargs):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="' + 'test' + '.csv"'
writer = csv.writer(response)
titles = []
titles.append('__Title__')
rows = []
for result in Subject.objects.all():
row = []
row_dict = {}
# store title and url
row_dict[0] = result.title
# controlled properties
cps = result.subjectcontrolproperty_set.all()
for each_prop in cps:
prop_name = each_prop.control_property.property.strip()
prop_value = each_prop.control_property_value.title.strip()
if not (prop_name in titles):
column_index = len(titles)
titles.append(prop_name)
else:
column_index = titles.index(prop_name)
if column_index in row_dict:
prop_value = row_dict[column_index] + '; ' + prop_value
row_dict[column_index] = prop_value #csv.writer adds any needed quoting itself--wrapping values in literal quotes would double-quote them
# free-form properties
ps = result.subjectproperty_set.all()
for each_prop in ps:
prop_name = each_prop.property.property.strip()
prop_value = each_prop.property_value.strip()
if not (prop_name in titles):
column_index = len(titles)
titles.append(prop_name)
else:
column_index = titles.index(prop_name)
if column_index in row_dict:
prop_value = row_dict[column_index] + '; ' + prop_value
row_dict[column_index] = prop_value #csv.writer adds any needed quoting itself
# store row in list
for i in range(len(titles)):
if i in row_dict:
row.append(row_dict[i])
else:
row.append('')
rows.append(row)
# write out the rows, starting with header
writer.writerow(titles)
for each_row in rows:
writer.writerow([unicode(s).encode("utf-8") for s in each_row])
return response
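#The titles/row_dict bookkeeping above is a sparse-row pivot: columns are
#discovered on the fly and each row is padded to the columns known so far.
#The same idea on toy data (illustrative sketch only):
#
#    titles, rows = ['__Title__'], []
#    for rec in [{'__Title__': 'vase', 'color': 'red'}, {'__Title__': 'bowl', 'period': 'Iron II'}]:
#        row_dict = {}
#        for name, value in rec.items():
#            if name not in titles: titles.append(name)
#            row_dict[titles.index(name)] = value
#        rows.append([row_dict.get(i, '') for i in range(len(titles))])
#    #titles -> ['__Title__', 'color', 'period']
#    #note: rows built before a column first appears stay short, as in the code above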
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
# END OF SETUP CUSTOM ADMIN VIEWS
#=======================================================================================================================================================================================================================================
#=======================================================================================================================================================================================================================================
#//////////////////////////////////////////////////////////////////////////////////////////////////
##!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# SET THE ADMIN SITE TO THIS CUSTOM ADMIN !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
##!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
admin.site = MyAdminSite() #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
##!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
#//////////////////////////////////////////////////////////////////////////////////////////////////
#################################################################################################################################################################################################################################################################################################################################
# END NEW ADMIN
#################################################################################################################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################################################################################################################
#=====================================================================================#
# FILE: tracker/migrations/0006_auto_20200820_0227.py
# REPO: Stephan-e/CSR-tracker @ 780ce4c4d4f96a499803b25b3cc9869cfb275566 (MIT) -- Python, 2,325 bytes
#=====================================================================================#
# Generated by Django 3.0.5 on 2020-08-20 02:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tracker', '0005_auto_20200820_0123'),
]
operations = [
migrations.RemoveField(
model_name='companyorder',
name='carbondioxide_saved',
),
migrations.RemoveField(
model_name='companyorder',
name='land_saved',
),
migrations.RemoveField(
model_name='companyorder',
name='water_saved',
),
migrations.AlterField(
model_name='ingredient',
name='carbondioxide',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='ingredient',
name='land',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='ingredient',
name='water',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='recipe',
name='carbondioxide_use',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='recipe',
name='carbondioxide_use_veg',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='recipe',
name='land_use',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='recipe',
name='land_use_veg',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='recipe',
name='water_use',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
migrations.AlterField(
model_name='recipe',
name='water_use_veg',
field=models.DecimalField(decimal_places=2, default=0.0, max_digits=15),
),
]
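# Note: default=0.0 hands DecimalField a float default. It works, but a
# decimal.Decimal default avoids float round-trips (hedged alternative, not
# what this auto-generated migration emits):
#
#     from decimal import Decimal
#     field = models.DecimalField(decimal_places=2, default=Decimal('0.00'), max_digits=15)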
#=====================================================================================#
# FILE: old_code/src_python/nmpccodegen/Cfunctions/__init__.py
# REPO: kul-forbes/nmpc-codegen (also kul-optec/nmpc-codegen) @ 0b96da0840504817472b2bdc62c29c98bdf29c8f (MIT) -- Python, 250 bytes, 24 stars
#=====================================================================================#
from .ProximalFunction import ProximalFunction
from .Function import Cfunction
from .IndicatorBoxFunction import IndicatorBoxFunction
from .IndicatorBoxFunction import IndicatorBoxFunctionProx
from .source_file_operations import Source_file_generator
#=====================================================================================#
# FILE: pommerman/configs.py
# REPO: raileanu/playground @ 5c46b9f1823d4ef7c64b5128328ce1c717b0e043 (Apache-2.0) -- Python, 4,178 bytes
#=====================================================================================#
from . import envs
from . import characters
def ffa_v0():
"""Start up a FFA config with the default settings."""
env = envs.v0.Pomme
game_type = envs.utility.GameType.FFA
env_entry_point = 'pommerman.envs.v0:Pomme'
env_id = 'PommeFFA-v0'
env_kwargs = {
'game_type': game_type,
'board_size': envs.utility.BOARD_SIZE,
'agent_view_size': envs.utility.AGENT_VIEW_SIZE,
'num_rigid': envs.utility.NUM_RIGID,
'num_wood': envs.utility.NUM_WOOD,
'num_items': envs.utility.NUM_ITEMS,
'max_steps': envs.utility.MAX_STEPS,
'render_fps': envs.utility.RENDER_FPS,
}
agent = characters.Agent
return locals()
def ffa_v0_fast():
"""Start up a FFA config with the default settings."""
env = envs.v0.Pomme
game_type = envs.utility.GameType.FFA
env_entry_point = 'pommerman.envs.v0:Pomme'
env_id = 'PommeFFAFast-v0'
env_kwargs = {
'game_type': game_type,
'board_size': envs.utility.BOARD_SIZE,
'agent_view_size': envs.utility.AGENT_VIEW_SIZE,
'num_rigid': envs.utility.NUM_RIGID,
'num_wood': envs.utility.NUM_WOOD,
'num_items': envs.utility.NUM_ITEMS,
'max_steps': envs.utility.MAX_STEPS,
'render_fps': 1000,
}
agent = characters.Agent
return locals()
def ffa_v1():
"""Start up a collapsing FFA config with the default settings."""
env = envs.v1.Pomme
game_type = envs.utility.GameType.FFA
env_entry_point = 'pommerman.envs.v1:Pomme'
env_id = 'PommeFFA-v1'
env_kwargs = {
'game_type': game_type,
'board_size': envs.utility.BOARD_SIZE,
'agent_view_size': envs.utility.AGENT_VIEW_SIZE,
'num_rigid': envs.utility.NUM_RIGID,
'num_wood': envs.utility.NUM_WOOD,
'num_items': envs.utility.NUM_ITEMS,
'first_collapse': envs.utility.FIRST_COLLAPSE,
'max_steps': envs.utility.MAX_STEPS,
'render_fps': envs.utility.RENDER_FPS,
}
agent = characters.Agent
return locals()
def team_v0():
"""Start up a team config with the default settings."""
env = envs.v0.Pomme
game_type = envs.utility.GameType.Team
env_entry_point = 'pommerman.envs.v0:Pomme'
env_id = 'PommeTeam-v0'
env_kwargs = {
'game_type': game_type,
'board_size': envs.utility.BOARD_SIZE,
'agent_view_size': envs.utility.AGENT_VIEW_SIZE,
'num_rigid': envs.utility.NUM_RIGID,
'num_wood': envs.utility.NUM_WOOD,
'num_items': envs.utility.NUM_ITEMS,
'max_steps': envs.utility.MAX_STEPS,
'render_fps': envs.utility.RENDER_FPS,
}
agent = characters.Agent
return locals()
def team_v0_fast():
"""Start up a team config with the default settings."""
env = envs.v0.Pomme
game_type = envs.utility.GameType.Team
env_entry_point = 'pommerman.envs.v0:Pomme'
env_id = 'PommeTeamFast-v0'
env_kwargs = {
'game_type': game_type,
'board_size': envs.utility.BOARD_SIZE,
'agent_view_size': envs.utility.AGENT_VIEW_SIZE,
'num_rigid': envs.utility.NUM_RIGID,
'num_wood': envs.utility.NUM_WOOD,
'num_items': envs.utility.NUM_ITEMS,
'max_steps': envs.utility.MAX_STEPS,
'render_fps': 2000,
}
agent = characters.Agent
return locals()
def radio_v2():
"""Start up a team radio config with the default settings."""
env = envs.v2.Pomme
game_type = envs.utility.GameType.TeamRadio
env_entry_point = 'pommerman.envs.v2:Pomme'
env_id = 'PommeRadio-v2'
env_kwargs = {
'game_type': game_type,
'board_size': envs.utility.BOARD_SIZE,
'agent_view_size': envs.utility.AGENT_VIEW_SIZE,
'num_rigid': envs.utility.NUM_RIGID,
'num_wood': envs.utility.NUM_WOOD,
'num_items': envs.utility.NUM_ITEMS,
'max_steps': envs.utility.MAX_STEPS,
'is_partially_observable': True,
'radio_vocab_size': envs.utility.RADIO_VOCAB_SIZE,
'radio_num_words': envs.utility.RADIO_NUM_WORDS,
'render_fps': envs.utility.RENDER_FPS,
}
agent = characters.Agent
return locals()
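#Presumed consumption of these config factories (usage sketch; the consumer
#lives outside this file):
#
#    config = ffa_v0()
#    env = config['env'](**config['env_kwargs'])  #instantiate Pomme with the collected kwargs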
#=====================================================================================#
# FILE: Christmas tree.py
# REPO: donnat-homas123/PROJECT-1.0 @ 86877d2f0535c81f85318366b867f97888366782 (Apache-2.0) -- Python, 615 bytes
#=====================================================================================#
import time
space = " "
x = 20
y = 18
z = 16
for i in range(1,10,2):
print(space*x+("*"*i))
x -= 1
for i in range(5,16,2):
print(space*y+("*"*i))
y -= 1
for i in range(9,22,2):
print(space*z+("*"*i))
z -= 1
for i in range(1,8):
print(space*19+("***"))
time.sleep(5)
#edit
# Christmas Tree
import time
space = " "
x = 20
y = 18
z = 16
for i in range(1,10,2):
print(space*x+("*"*i))
x -= 1
for i in range(5,16,2):
print(space*y+("*"*i))
y -= 1
for i in range(9,22,2):
print(space*z+("*"*i))
z -= 1
for i in range(1,8):
print(space*19+("***"))
time.sleep(5)
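# The three hard-coded loops share one pattern: each row adds two stars and
# shifts the indent left by one. A generalized sketch (not in the original):
def pyramid(rows, indent):
    for i in range(rows):
        print(" " * (indent - i) + "*" * (2 * i + 1))

#pyramid(5, 20)  # would reproduce the first loop: widths 1,3,5,7,9 at indents 20..16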
#=====================================================================================#
# FILE: First/17.list.py
# REPO: KingJin-web/python @ bf39a1d130a82bc0775b4f984a32cc3d9139ff39 (Unlicense) -- Python, 99 bytes
#=====================================================================================#
t = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
print(t[3])
print(t[-99:-7])
print(t[-99:-5])
print(t[::])
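# Slice bounds outside the list clamp instead of raising, so the expected
# results above are (asserts for illustration):
assert t[3] == 'd'
assert t[-99:-7] == []           # -99 clamps to index 0; -7 is index 0 -> empty
assert t[-99:-5] == ['a', 'b']   # -5 is index 2
assert t[::] == t and t[::] is not t  # full slice returns a shallow copy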
#=====================================================================================#
# FILE: python_resumable/models/__init__.py
# REPO: akaushik759/python-resumable @ 0c3a6d908b2c850ea45bfe2a5434ef3cacaa9c51 (MIT) -- Python, 118 bytes, 6 stars
#=====================================================================================#
from python_resumable.models.chunk import Chunk, FlaskChunk
from python_resumable.models.repository import Repository
#=====================================================================================#
# FILE: brainbox/population/__init__.py
# REPO: k1o0/ibllib @ 842a8d72e0b75153126296061f4561f477e99646 (MIT) -- Python, 76 bytes
#=====================================================================================#
from .population import _get_spike_counts_in_bins
from .population import *
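# Note: the explicit first import matters--"import *" skips names with a
# leading underscore (unless __all__ lists them), so _get_spike_counts_in_bins
# must be re-exported by hand.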
#=====================================================================================#
# FILE: house_code/main_programs/PSUPozyx/modules/console_logging_functions.py
# REPO: mukobi/Pozyx-Gabe @ a8b444c2013b1df5043cd25106b72562409b5130 (MIT) -- Python, 30,473 bytes
#=====================================================================================#
import time
from .data_functions import DataFunctions
import sys
sys.stdout.flush()
class ConsoleLoggingFunctions:
@staticmethod
def get_time():
"""
Gets processor time
:return float current_time: the current processor time
"""
current_time = time.time()
return current_time
@staticmethod
def get_elapsed_time(start_time):
"""
Gets elapsed time since start_time
:param float start_time: time to count from, set at program start
:return float elapsed_time: time passed since start_time
"""
#A staticmethod receives no self--measure directly with time.time()
elapsed_time = time.time() - start_time
return elapsed_time
@staticmethod
def single_cycle_time_difference(previous_time, current_time):
"""
Calculates the time it took to get to the current cycle
:param float previous_time: the point of time of the previous cycle
:param float current_time: the point of time of the current cycle
:return:
:time_difference: the difference in time between cycles
:new_previous_time: used as previous_time in next cycle
:rtype: float, float
"""
time_difference = current_time - previous_time
new_previous_time = current_time
return time_difference, new_previous_time
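#Presumed call pattern for the timing helpers above (sketch only):
#
#    start_time = ConsoleLoggingFunctions.get_time()
#    previous_time = start_time
#    #then, once per logging cycle:
#    current_time = ConsoleLoggingFunctions.get_time()
#    elapsed = ConsoleLoggingFunctions.get_elapsed_time(start_time)
#    cycle_time, previous_time = ConsoleLoggingFunctions.single_cycle_time_difference(previous_time, current_time)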
@staticmethod
def log_sensor_data_to_console(index, elapsed, data_dictionary):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param dict data_dictionary: a dictionary where the keys are the
labels for each data type to log (e.g. acceleration, magnetic)
and the values are lists of labels and values (for example,
['x', 2, 'y', 3, 'z', 5] )
"""
output = ConsoleLoggingFunctions.create_sensor_data_output(
index, elapsed, data_dictionary)
print(output, flush=True)
@staticmethod
def create_sensor_data_output(index, elapsed, data_dictionary):
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(data_dictionary) == str:
output += data_dictionary
else:
for key in data_dictionary:
output += " | " + key
for item in data_dictionary[key]:
output += " " + str(item)
return output
@staticmethod
def log_position_to_console(index, elapsed, position):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position: position data
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(position) == str:
output += position
else:
output += (" | Pos: " + "X: " + str(position.x)
+ " Y: " + str(position.y)
+ " Z: " + str(position.z))
print(output, flush=True)
@staticmethod
def log_position_and_velocity_to_console(index, elapsed, position, velocity_x, velocity_y, velocity_z):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position: position data
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(position) == str:
output += position
else:
output += (" | Pos: " + "X: " + str(position.x)
+ " Y: " + str(position.y)
+ " Z: " + str(position.z))
output += (" | Vel: " + "X: " + DataFunctions.str_append_length(velocity_x, 7)
+ " Y: " + DataFunctions.str_append_length(velocity_y, 7)
+ " Z: " + DataFunctions.str_append_length(velocity_z, 7))
print(output, flush=True)
@staticmethod
def log_position_to_console_1d(index, elapsed, position):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position: position data
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(position) == str:
output += position
else:
output += (" | Pos: " + "X: " + str(position.distance))
print(output, flush=True)
@staticmethod
def log_position_and_velocity_to_console_1d(index, elapsed, position, velocity):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position: position data
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(position) == str:
output += position
else:
output += (" | Pos: " + "X: " + str(position.distance))
output += (" | Vel: " + "X: " + DataFunctions.str_append_length(velocity, 7))
print(output, flush=True)
@staticmethod
def log_range_motion_and_velocity(
index, elapsed, position, data_dictionary, velocity):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position: position data
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
output += ConsoleLoggingFunctions.create_sensor_data_output(
index, elapsed, data_dictionary)
# if the data passed was an error string
if type(position) == str:
output += position
else:
output += (" | Pos: " + "X: " + str(position.distance))
output += (" | Vel: " + "X: " + DataFunctions.str_append_length(velocity, 7))
print(output, flush=True)
@staticmethod
def log_range_and_motion(
index, elapsed, position, data_dictionary):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position: position data
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
output += ConsoleLoggingFunctions.create_sensor_data_output(
index, elapsed, data_dictionary)
# if the data passed was an error string
if type(position) == str:
output += position
else:
output += (" | Pos: " + "X: " + str(position.distance))
print(output, flush=True)
@staticmethod
def log_multitag_position_to_console(index, elapsed, position_array):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position_array: position data with tags in array
"""
output = str(index)
output += " Time "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
output += " | "
#position_array is a flat sequence of four-element groups: [tag, x, y, z, ...]
for idx, element in enumerate(position_array):
if idx % 4 == 0:
output += hex(element) + " " #every fourth entry is a tag id--print it as hex
else:
output += str(element) + " "
print(output, flush=True)
@staticmethod
def log_multitag_1D_to_console(index, elapsed, position_array):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param position_array: position data with tags in array
"""
output = str(index)
output += " Time "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
output += " | "
#position_array is a flat sequence of [tag, value] pairs
for idx, element in enumerate(position_array):
if idx % 2 == 0:
output += hex(element) + " " #every other entry is a tag id--print it as hex
else:
output += str(element) + " "
print(output, flush=True)
@staticmethod
def log_position_and_sensor_data_to_console(index, elapsed, data_dictionary, position):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param dict data_dictionary: a dictionary where the keys are the
labels for each data type to log (e.g. acceleration, magnetic)
and the values are lists of labels and values (for example,
['x', 2, 'y', 3, 'z', 5] )
:param position: position data from device
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(data_dictionary) == str:
output += data_dictionary
elif type(position) == str:
output += position
else:
for key in data_dictionary:
output += " | " + key
for item in data_dictionary[key]:
output += " " + str(item)
output += (" | Pos: " + "X: " + str(position.x)
+ " Y: " + str(position.y)
+ " Z: " + str(position.z))
print(output, flush=True)
@staticmethod
def log_position_and_velocity_and_sensor_data_to_console(index, elapsed, data_dictionary, position, velocity_x, velocity_y, velocity_z):
"""
Prints a line of data to the console
:param int index: data index
:param float elapsed: elapsed time since the program started
:param dict data_dictionary: a dictionary where the keys are the
labels for each data type to log (e.g. acceleration, magnetic)
and the values are lists of labels and values (for example,
['x', 2, 'y', 3, 'z', 5] )
:param position: position data from device
"""
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
# if the data passed was an error string
if type(data_dictionary) == str:
output += data_dictionary
elif type(position) == str:
output += position
else:
for key in data_dictionary:
output += " | " + key
for item in data_dictionary[key]:
output += " " + str(item)
output += (" | Pos: " + "X: " + str(position.x)
+ " Y: " + str(position.y)
+ " Z: " + str(position.z))
output += (" | Vel: " + "X: " + DataFunctions.str_append_length(velocity_x, 6)
+ " Y: " + DataFunctions.str_append_length(velocity_y, 6)
+ " Z: " + DataFunctions.str_append_length(velocity_z, 6))
print(output, flush=True)
@staticmethod
def format_sensor_data(sensor_data, multiple_attributes_to_log):
"""
:param sensor_data:
:param multiple_attributes_to_log:
:return: formatted data dictionary
"""
# if the sensor data was returned as an error string
try:
data_dictionary = {}
for attribute_to_log in multiple_attributes_to_log:
line_of_data = []
if attribute_to_log == "pressure":
attribute_to_log += ":" # add a colon in the output
line_of_data.append(DataFunctions.exp_notation_str_set_length(
DataFunctions, sensor_data.pressure, 10))
elif attribute_to_log == "acceleration":
line_of_data.append("x:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.acceleration.x, 8))
line_of_data.append("y:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.acceleration.y, 8))
line_of_data.append("z:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.acceleration.z, 8))
elif attribute_to_log == "magnetic":
line_of_data.append("x:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.magnetic.x, 8))
line_of_data.append("y:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.magnetic.y, 8))
line_of_data.append("z:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.magnetic.z, 8))
elif attribute_to_log == "angular velocity":
line_of_data.append("x:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.angular_vel.x, 8))
line_of_data.append("y:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.angular_vel.y, 8))
line_of_data.append("z:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.angular_vel.z, 8))
elif attribute_to_log == "euler angles":
line_of_data.append("heading:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.euler_angles.heading, 8))
line_of_data.append("roll:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.euler_angles.roll, 8))
line_of_data.append("pitch:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.euler_angles.pitch, 8))
elif attribute_to_log == "quaternion":
line_of_data.append("x:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.x, 8))
line_of_data.append("y:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.y, 8))
line_of_data.append("z:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.z, 8))
line_of_data.append("w:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.w, 8))
elif attribute_to_log == "linear acceleration":
line_of_data.append("x:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.linear_acceleration.x, 8))
line_of_data.append("y:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.linear_acceleration.y, 8))
line_of_data.append("z:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.linear_acceleration.z, 8))
elif attribute_to_log == "gravity":
line_of_data.append("x:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.gravity_vector.x, 8))
line_of_data.append("y:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.gravity_vector.y, 8))
line_of_data.append("z:")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.gravity_vector.z, 8))
data_dictionary[attribute_to_log.title()] = line_of_data
return data_dictionary
except AttributeError:
return " Error in data"
@staticmethod
def print_data_error_message(index, elapsed_time, message="Error, no data"):
output = (str(index) + " Time: "
+ DataFunctions.str_append_length(elapsed_time, 10) + " "
+ message)
print(output, flush=True)
class CondensedConsoleLogging:
@staticmethod
def get_time():
"""
Gets processor time
:return float current_time: the current processor time
"""
current_time = time.time()
return current_time
@staticmethod
def get_elapsed_time(start_time):
"""
Gets elapsed time since start_time
:param float start_time: time to count from, set at program start
:return float elapsed_time: time passed since start_time
"""
#A staticmethod receives no self--measure directly with time.time()
elapsed_time = time.time() - start_time
return elapsed_time
@staticmethod
def single_cycle_time_difference(previous_time, current_time):
"""
Calculates the time it took to get to the current cycle
:param float previous_time: the point of time of the previous cycle
:param float current_time: the point of time of the current cycle
:return:
:time_difference: the difference in time between cycles
:new_previous_time: used as previous_time in next cycle
:rtype: float, float
"""
time_difference = current_time - previous_time
new_previous_time = current_time
return time_difference, new_previous_time
@staticmethod
def build_timestamp(index, elapsed):
output = str(index)
output += " Time: "
elapsed_time_str = DataFunctions.str_append_length(elapsed, 10)
output += elapsed_time_str
output += " Hz: "
ave_hertz = DataFunctions.find_average_hertz(index, elapsed)
ave_hertz_str = DataFunctions.str_append_length(ave_hertz, 5)
output += ave_hertz_str
return output
@staticmethod
def build_tag(single_device):
return " | " + hex(single_device.tag)
@staticmethod
def build_range(single_device):
output = " | Dist "
output += DataFunctions.str_prepend_length(
single_device.device_range.distance, 5)
output += " | Smooth "
output += DataFunctions.str_prepend_length(
int(single_device.smoothed_range + 0.5), 5)
output += " | Vel "
try:
output += DataFunctions.str_prepend_length(
int(single_device.velocity + 0.5), 5)
except TypeError:
output += " "
return output
@staticmethod
def build_position(single_device):
output = " | Pos "
output += DataFunctions.str_prepend_length(
single_device.position.x, 5) + " "
output += DataFunctions.str_prepend_length(
single_device.position.y, 5) + " "
output += DataFunctions.str_prepend_length(
single_device.position.z, 5)
output += " | Smooth "
output += DataFunctions.str_prepend_length(
int(single_device.smoothed_x + 0.5), 5) + " "
output += DataFunctions.str_prepend_length(
int(single_device.smoothed_y + 0.5), 5) + " "
output += DataFunctions.str_prepend_length(
int(single_device.smoothed_z + 0.5), 5)
output += " | Vel "
try:
output += DataFunctions.str_prepend_length(
int(single_device.velocity_x + 0.5), 5) + " "
output += DataFunctions.str_prepend_length(
int(single_device.velocity_y + 0.5), 5) + " "
output += DataFunctions.str_prepend_length(
int(single_device.velocity_z + 0.5), 5)
except TypeError:
output += " " * 15
return output
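    # Note on the idiom above: int(x + 0.5) rounds a positive float to the
    # nearest integer, and the TypeError fallback pads the velocity columns
    # with spaces when no velocity has been computed yet.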
@staticmethod
def format_sensor_data(sensor_data, multiple_attributes_to_log):
"""
:param sensor_data:
:param multiple_attributes_to_log:
:return: formatted data dictionary
"""
# if the sensor data was returned as an error string
try:
data_dictionary = {}
for attribute_to_log in multiple_attributes_to_log:
line_of_data = []
if attribute_to_log == "pressure":
attribute_to_log = "Press"
line_of_data.append(DataFunctions.str_append_length(
sensor_data.pressure, 8))
elif attribute_to_log == "acceleration":
attribute_to_log = "Acc"
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.acceleration.x, 6))
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.acceleration.y, 6))
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.acceleration.z, 6))
elif attribute_to_log == "magnetic":
attribute_to_log = "Mag"
line_of_data.append(DataFunctions.str_append_length(
sensor_data.magnetic.x, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.magnetic.y, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.magnetic.z, 6))
elif attribute_to_log == "angular velocity":
attribute_to_log = "Ang Vel"
line_of_data.append(DataFunctions.str_append_length(
sensor_data.angular_vel.x, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.angular_vel.y, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.angular_vel.z, 6))
elif attribute_to_log == "euler angles":
attribute_to_log = ""
line_of_data.append("Heading")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.euler_angles.heading, 6))
line_of_data.append("Roll")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.euler_angles.roll, 6))
line_of_data.append("Pitch")
line_of_data.append(DataFunctions.str_append_length(
sensor_data.euler_angles.pitch, 6))
elif attribute_to_log == "quaternion":
attribute_to_log = "Quat"
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.x, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.y, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.z, 6))
line_of_data.append(DataFunctions.str_append_length(
sensor_data.quaternion.w, 6))
elif attribute_to_log == "linear acceleration":
attribute_to_log = "Lin Acc"
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.linear_acceleration.x, 6))
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.linear_acceleration.y, 6))
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.linear_acceleration.z, 6))
elif attribute_to_log == "gravity":
attribute_to_log = "Grav"
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.gravity_vector.x, 6))
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.gravity_vector.y, 6))
line_of_data.append(DataFunctions.str_prepend_length(
sensor_data.gravity_vector.z, 6))
data_dictionary[attribute_to_log.title()] = line_of_data
return data_dictionary
except AttributeError:
return " Error in data"
@staticmethod
def build_sensor_data(single_device_data, attributes_to_log):
"""
Builds motion data output for a tag
"""
if not attributes_to_log:
return ""
motion_data = single_device_data.sensor_data
data_dictionary = CondensedConsoleLogging.format_sensor_data(
motion_data, attributes_to_log)
output = ""
        if isinstance(data_dictionary, str):
output += data_dictionary
else:
for key in data_dictionary:
output += " | " + key
for item in data_dictionary[key]:
output += " " + str(item)
return output
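    # A hypothetical sketch of build_sensor_data with a stub device. Only
    # the attributes the formatter reads need to exist, so SimpleNamespace
    # can stand in for the real device type:
    #
    #     from types import SimpleNamespace
    #     stub = SimpleNamespace(
    #         sensor_data=SimpleNamespace(pressure=1013.25))
    #     CondensedConsoleLogging.build_sensor_data(stub, ["pressure"])
    #     # -> " | Press <pressure padded to 8 characters>"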
@staticmethod
def print_1d_ranging_output(index, elapsed, ranging_loop_array, attributes_to_log):
output = CondensedConsoleLogging.build_timestamp(index, elapsed)
for single_device in ranging_loop_array:
output += CondensedConsoleLogging.build_tag(single_device)
output += CondensedConsoleLogging.build_sensor_data(
single_device, attributes_to_log)
output += CondensedConsoleLogging.build_range(single_device)
print(output, flush=True)
@staticmethod
def print_3d_positioning_output(index, elapsed, position_loop_array, attributes_to_log):
output = CondensedConsoleLogging.build_timestamp(index, elapsed)
for single_device in position_loop_array:
output += CondensedConsoleLogging.build_tag(single_device)
output += CondensedConsoleLogging.build_sensor_data(
single_device, attributes_to_log)
output += CondensedConsoleLogging.build_position(single_device)
print(output, flush=True)
@staticmethod
def print_motion_data_output(index, elapsed, loop_array, attributes_to_log):
output = CondensedConsoleLogging.build_timestamp(index, elapsed)
for single_device in loop_array:
output += CondensedConsoleLogging.build_tag(single_device)
output += CondensedConsoleLogging.build_sensor_data(
single_device, attributes_to_log)
print(output, flush=True)
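# A minimal, self-contained demo of the condensed 1D ranging output. This
# is a sketch under assumptions: the stub attributes (tag, device_range,
# smoothed_range, velocity, sensor_data) mirror what the builders above
# read; real device objects come from the positioning library, not from
# SimpleNamespace.
if __name__ == "__main__":
    from types import SimpleNamespace

    fake_device = SimpleNamespace(
        tag=0x605D,                      # hypothetical device tag
        device_range=SimpleNamespace(distance=1234),
        smoothed_range=1230.4,
        velocity=12.7,
        sensor_data=SimpleNamespace(pressure=1013.25),
    )
    # pretend the loop started a quarter second ago so the average Hz
    # calculation has a nonzero elapsed time to divide by
    start_time = CondensedConsoleLogging.get_time() - 0.25
    elapsed = CondensedConsoleLogging.get_elapsed_time(start_time)
    CondensedConsoleLogging.print_1d_ranging_output(
        1, elapsed, [fake_device], ["pressure"])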