Dataset schema (113 columns, one row per source file). Each `qsc_*` signal appears twice: once with the `_quality_signal` suffix (the computed value) and once without the suffix; in the rows previewed below the unsuffixed columns carry 0/1 flag values, except `qsc_code_frac_words_unique` and `qsc_code_frac_lines_string_concat`, which are null in every row shown.

| Column | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
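The rows below can be loaded and filtered on these columns. A minimal sketch with pandas, assuming the split is stored as a Parquet shard; the file name is hypothetical and the thresholds are illustrative, not the dataset's own filtering rules:

```python
# Minimal sketch: load one shard with this schema and apply a simple
# quality filter. Shard path is hypothetical; substitute the real one.
import pandas as pd

df = pd.read_parquet("data/shard-00000.parquet")  # hypothetical path

keep = (
    (df["qsc_codepython_cate_ast_quality_signal"] == 1)             # parses as Python
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)  # not mostly duplicated
    & (df["qsc_code_num_lines_quality_signal"] >= 10)               # not a trivial stub
)
print(df.loc[keep, ["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```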
**Row 1: `networkx-d3-v2/lib/gdata/tlslite/api.py` (suraj-testing2/Clock_Websites)**

| Field | Value |
|---|---|
| hexsha | eebfbc6091ec80b637c96bce91819a090cda329e |
| size | 2,965 |
| ext | py |
| lang | Python |
| max_stars_repo_path | networkx-d3-v2/lib/gdata/tlslite/api.py |
| max_stars_repo_name | suraj-testing2/Clock_Websites |
| max_stars_repo_head_hexsha | 0e65331da40cfd3766f1bde17f0a9c7ff6666dea |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 2,293 |
| max_stars_repo_stars_event_min_datetime | 2015-01-02T12:46:10.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-29T09:45:43.000Z |
| max_issues_repo_path | third_party/tlslite/tlslite/api.py |
| max_issues_repo_name | 1065672644894730302/Chromium |
| max_issues_repo_head_hexsha | 239dd49e906be4909e293d8991e998c9816eaa35 |
| max_issues_repo_licenses | ["BSD-3-Clause"] |
| max_issues_count | 315 |
| max_issues_repo_issues_event_min_datetime | 2015-05-31T11:55:46.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-01-12T08:36:37.000Z |
| max_forks_repo_path | third_party/tlslite/tlslite/api.py |
| max_forks_repo_name | 1065672644894730302/Chromium |
| max_forks_repo_head_hexsha | 239dd49e906be4909e293d8991e998c9816eaa35 |
| max_forks_repo_licenses | ["BSD-3-Clause"] |
| max_forks_count | 1,033 |
| max_forks_repo_forks_event_min_datetime | 2015-01-04T07:48:40.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-24T09:34:37.000Z |

`content`:

```python
"""Import this module for easy access to TLS Lite objects.
The TLS Lite API consists of classes, functions, and variables spread
throughout this package. Instead of importing them individually with::
from tlslite.TLSConnection import TLSConnection
from tlslite.HandshakeSettings import HandshakeSettings
from tlslite.errors import *
.
.
It's easier to do::
from tlslite.api import *
This imports all the important objects (TLSConnection, Checker,
HandshakeSettings, etc.) into the global namespace. In particular, it
imports::
from constants import AlertLevel, AlertDescription, Fault
from errors import *
from Checker import Checker
from HandshakeSettings import HandshakeSettings
from Session import Session
from SessionCache import SessionCache
from SharedKeyDB import SharedKeyDB
from TLSConnection import TLSConnection
from VerifierDB import VerifierDB
from X509 import X509
from X509CertChain import X509CertChain
from integration.HTTPTLSConnection import HTTPTLSConnection
from integration.POP3_TLS import POP3_TLS
from integration.IMAP4_TLS import IMAP4_TLS
from integration.SMTP_TLS import SMTP_TLS
from integration.XMLRPCTransport import XMLRPCTransport
from integration.TLSSocketServerMixIn import TLSSocketServerMixIn
from integration.TLSAsyncDispatcherMixIn import TLSAsyncDispatcherMixIn
from integration.TLSTwistedProtocolWrapper import TLSTwistedProtocolWrapper
from utils.cryptomath import cryptlibpyLoaded, m2cryptoLoaded,
gmpyLoaded, pycryptoLoaded, prngName
from utils.keyfactory import generateRSAKey, parsePEMKey, parseXMLKey,
parseAsPublicKey, parsePrivateKey
"""
from constants import AlertLevel, AlertDescription, Fault
from errors import *
from Checker import Checker
from HandshakeSettings import HandshakeSettings
from Session import Session
from SessionCache import SessionCache
from SharedKeyDB import SharedKeyDB
from TLSConnection import TLSConnection
from VerifierDB import VerifierDB
from X509 import X509
from X509CertChain import X509CertChain
from integration.HTTPTLSConnection import HTTPTLSConnection
from integration.TLSSocketServerMixIn import TLSSocketServerMixIn
from integration.TLSAsyncDispatcherMixIn import TLSAsyncDispatcherMixIn
from integration.POP3_TLS import POP3_TLS
from integration.IMAP4_TLS import IMAP4_TLS
from integration.SMTP_TLS import SMTP_TLS
from integration.XMLRPCTransport import XMLRPCTransport
try:
import twisted
del(twisted)
from integration.TLSTwistedProtocolWrapper import TLSTwistedProtocolWrapper
except ImportError:
pass
from utils.cryptomath import cryptlibpyLoaded, m2cryptoLoaded, gmpyLoaded, \
pycryptoLoaded, prngName
from utils.keyfactory import generateRSAKey, parsePEMKey, parseXMLKey, \
parseAsPublicKey, parsePrivateKey
```

avg_line_length: 39.013158, max_line_length: 79, alphanum_fraction: 0.794604

| Signal | Value (`*_quality_signal`) | Flag (unsuffixed) |
|---|---|---|
| qsc_code_num_words | 297 | 0 |
| qsc_code_num_chars | 2,965 | 0 |
| qsc_code_mean_word_length | 7.892256 | 0 |
| qsc_code_frac_words_unique | 0.279461 | null |
| qsc_code_frac_chars_top_2grams | 0.102389 | 0 |
| qsc_code_frac_chars_top_3grams | 0.046075 | 0 |
| qsc_code_frac_chars_top_4grams | 0.046075 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.797782 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0.743601 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.743601 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.743601 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.743601 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.743601 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.013952 | 0 |
| qsc_code_frac_chars_whitespace | 0.178078 | 0 |
| qsc_code_size_file_byte | 2,965 | 0 |
| qsc_code_num_lines | 75 | 0 |
| qsc_code_num_chars_line_max | 80 | 0 |
| qsc_code_num_chars_line_mean | 39.533333 | 0 |
| qsc_code_frac_chars_alphabet | 0.947887 | 0 |
| qsc_code_frac_chars_comments | 0.594266 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | true | 1 |
| qsc_codepython_frac_lines_pass | 0.035714 | 0 |
| qsc_codepython_frac_lines_import | 0.821429 | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.821429 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |

effective: 0, hits: 8
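The docstring of `api.py` above explains that a single star-import exposes the library's main classes. A minimal sketch of the usage pattern it enables, assuming a reachable TLS server; the host, port, and anonymous handshake are illustrative assumptions, not taken from the row:

```python
# Sketch of the star-import pattern the docstring describes; the
# connection details below are illustrative only.
import socket
from tlslite.api import *

sock = socket.create_connection(('example.com', 443))
connection = TLSConnection(sock)   # imported via tlslite.api
settings = HandshakeSettings()     # tune ciphers / key sizes here
connection.handshakeClientCert(settings=settings)
connection.close()
```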
**Row 2: `test_system.py` (Mec-iS/linear-algebra)**

| Field | Value |
|---|---|
| hexsha | eec9efd8b4205fb3e524ecc3a7661fc86163d0ef |
| size | 7,271 |
| ext | py |
| lang | Python |
| max_stars_repo_path | test_system.py |
| max_stars_repo_name | Mec-iS/linear-algebra |
| max_stars_repo_head_hexsha | a8d81ff20466cb813a5551b84ceb9eca90aeedca |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | test_system.py |
| max_issues_repo_name | Mec-iS/linear-algebra |
| max_issues_repo_head_hexsha | a8d81ff20466cb813a5551b84ceb9eca90aeedca |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | test_system.py |
| max_forks_repo_name | Mec-iS/linear-algebra |
| max_forks_repo_head_hexsha | a8d81ff20466cb813a5551b84ceb9eca90aeedca |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

`content`:

```python
import unittest
from vector import Vector
from plane import Plane
from system import LinearSystem
class TestSystem(unittest.TestCase):
p0 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p1 = Plane(normal_vector=Vector([0, 1, 0]), const_term=2)
p2 = Plane(normal_vector=Vector([1, 1, -1]), const_term=3)
p3 = Plane(normal_vector=Vector([1, 0, -2]), const_term=2)
def test_init(self):
s = LinearSystem([self.p0,self.p1,self.p2,self.p3])
self.assertTrue(s.indices_of_first_nonzero_terms_in_each_row() == [0, 1, 0, 0])
print('{},{},{},{}'.format(s[0],s[1],s[2],s[3]))
print(len(s))
print(s)
s[0] = self.p1
print(s)
def test_rows_swapping_and_multiplications(self):
s = LinearSystem([self.p0,self.p1,self.p2,self.p3])
s.swap_rows(0,1)
self.assertTrue(s[0] == self.p1 and s[1] == self.p0 and s[2] == self.p2 and s[3] == self.p3)
s.swap_rows(1,3)
self.assertTrue(s[0] == self.p1 and s[1] == self.p3 and s[2] == self.p2 and s[3] == self.p0)
s.swap_rows(3,1)
self.assertTrue(s[0] == self.p1 and s[1] == self.p0 and s[2] == self.p2 and s[3] == self.p3)
s.multiply_coefficient_and_row(1,0)
self.assertTrue(s[0] == self.p1 and s[1] == self.p0 and s[2] == self.p2 and s[3] == self.p3)
s.multiply_coefficient_and_row(-1,2)
self.assertTrue(s[0] == self.p1 and
s[1] == self.p0 and
s[2] == Plane(normal_vector=Vector([-1, -1, 1]), const_term=-3) and
s[3] == self.p3)
s.multiply_coefficient_and_row(10,1)
self.assertTrue(s[0] == self.p1 and
s[1] == Plane(normal_vector=Vector([10, 10, 10]), const_term=10) and
s[2] == Plane(normal_vector=Vector([-1, -1, 1]), const_term=-3) and
s[3] == self.p3)
s.add_multiple_times_row_to_row(0,0,1)
self.assertTrue(s[0] == self.p1 and
s[1] == Plane(normal_vector=Vector([10, 10, 10]), const_term=10) and
s[2] == Plane(normal_vector=Vector([-1, -1, 1]), const_term=-3) and
s[3] == self.p3)
s.add_multiple_times_row_to_row(1,0,1)
self.assertTrue(s[0] == self.p1 and
s[1] == Plane(normal_vector=Vector([10, 11, 10]), const_term=12) and
s[2] == Plane(normal_vector=Vector([-1, -1, 1]), const_term=-3) and
s[3] == self.p3)
s.add_multiple_times_row_to_row(-1,1,0)
self.assertTrue(s[0] == Plane(normal_vector=Vector([-10, -10, -10]), const_term=-10) and
s[1] == Plane(normal_vector=Vector([10, 11, 10]), const_term=12) and
s[2] == Plane(normal_vector=Vector([-1, -1, 1]), const_term=-3) and
s[3] == self.p3)
def test_triangular_form(self):
p1 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([0, 1, 1]), const_term=2)
s = LinearSystem([p1,p2])
t = s.compute_triangular_form()
self.assertTrue(t[0] == p1 and
t[1] == p2)
p1 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([1, 1, 1]), const_term=2)
s = LinearSystem([p1,p2])
t = s.compute_triangular_form()
self.assertTrue(t[0] == p1)
#self.assertRaises(ValueError, Plane, Vector([0, 0, 0]), const_term=1)
p1 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([0, 1, 0]), const_term=2)
p3 = Plane(normal_vector=Vector([1, 1, -1]), const_term=3)
p4 = Plane(normal_vector=Vector([1, 0, -2]), const_term=2)
s = LinearSystem([p1,p2,p3,p4])
t = s.compute_triangular_form()
self.assertTrue(t[0] == p1 and
t[1] == p2 and
t[2] == Plane(normal_vector=Vector([0, 0, -2]), const_term=2) #and
#t[3] == Plane(Vector([0, 0, 0]), 0)
)
p1 = Plane(normal_vector=Vector([0, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([1, -1, 1]), const_term=2)
p3 = Plane(normal_vector=Vector([1, 2, -5]), const_term=3)
s = LinearSystem([p1,p2,p3])
t = s.compute_triangular_form()
self.assertTrue(t[0] == Plane(normal_vector=Vector([1, -1, 1]), const_term=2) and
t[1] == Plane(normal_vector=Vector([0, 1, 1]), const_term=1) and
t[2] == Plane(normal_vector=Vector([0, 0, -9]), const_term=-2))
def test_reduced_row_echelon(self):
p1 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([0, 1, 1]), const_term=2)
s = LinearSystem([p1,p2])
r = s.compute_rref()
self.assertTrue(r[0] == Plane(normal_vector=Vector([1, 0, 0]), const_term=-1) and
r[1] == p2)
p1 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([1, 1, 1]), const_term=2)
s = LinearSystem([p1,p2])
r = s.compute_rref()
self.assertTrue(r[0] == p1) # and
#r[1] == Plane(const_term='1'))
p1 = Plane(normal_vector=Vector([1, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([0, 1, 0]), const_term=2)
p3 = Plane(normal_vector=Vector([1, 1, -1]), const_term=3)
p4 = Plane(normal_vector=Vector([1, 0, -2]), const_term=2)
s = LinearSystem([p1,p2,p3,p4])
r = s.compute_rref()
self.assertTrue(r[0] == Plane(normal_vector=Vector([1, 0, 0]), const_term=0) and
r[1] == p2 and
r[2] == Plane(normal_vector=Vector([0, 0, -2]), const_term=2) # and
# r[3] == Plane())
)
p1 = Plane(normal_vector=Vector([0, 1, 1]), const_term=1)
p2 = Plane(normal_vector=Vector([1, -1, 1]), const_term=2)
p3 = Plane(normal_vector=Vector([1, 2, -5]), const_term=3)
s = LinearSystem([p1,p2,p3])
r = s.compute_rref()
self.assertTrue(r[0] == Plane(normal_vector=Vector([1, 0, 0]), const_term=23 / 9) and
r[1] == Plane(normal_vector=Vector([0, 1, 0]), const_term=7 / 9) and
r[2] == Plane(normal_vector=Vector([0, 0, 1]), const_term=2 / 9)
)
def test_compute_gaussian(self):
p1 = Plane(Vector([5.862, 1.178, -10.366]), -8.15)
p2 = Plane(Vector([-2.931, -0.589, 5.183]), -4.075)
l1 = LinearSystem([p1, p2])
result = l1.compute_gaussian_elimination
self.assertRaises(Exception, result)
p3 = Plane(Vector([8.631, 5.112, -1.816]), -5.113)
p4 = Plane(Vector([4.315, 11.132, -5.27]), 6.775)
p5 = Plane(Vector([-2.158, 3.01, -1.727]), -0.831)
l1 = LinearSystem([p3, p4, p5])
print('GE 2', l1.compute_gaussian_elimination())
p6 = Plane(Vector([5.262, 2.739, -9.878]), -3.441)
p7 = Plane(Vector([5.111, 6.358, -7.638]), -2.152)
p8 = Plane(Vector([2.016, -9.924, -1.367]), -9.278)
p9 = Plane(Vector([2.167, -13.543, -18.883]), -10.567)
l1 = LinearSystem([p6, p7, p8, p9])
print('GE 3', l1.compute_gaussian_elimination())
if __name__ == '__main__':
unittest.main()
```

avg_line_length: 43.023669, max_line_length: 100, alphanum_fraction: 0.554119

| Signal | Value (`*_quality_signal`) | Flag (unsuffixed) |
|---|---|---|
| qsc_code_num_words | 1,142 | 0 |
| qsc_code_num_chars | 7,271 | 0 |
| qsc_code_mean_word_length | 3.382662 | 0 |
| qsc_code_frac_words_unique | 0.10683 | null |
| qsc_code_frac_chars_top_2grams | 0.11183 | 0 |
| qsc_code_frac_chars_top_3grams | 0.202433 | 1 |
| qsc_code_frac_chars_top_4grams | 0.27388 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.765208 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0.745793 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.740357 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.73984 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.739322 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.697385 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.105891 | 0 |
| qsc_code_frac_chars_whitespace | 0.262275 | 0 |
| qsc_code_size_file_byte | 7,271 | 0 |
| qsc_code_num_lines | 169 | 0 |
| qsc_code_num_chars_line_max | 101 | 0 |
| qsc_code_num_chars_line_mean | 43.023669 | 0 |
| qsc_code_frac_chars_alphabet | 0.61428 | 0 |
| qsc_code_frac_chars_comments | 0.02228 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.492424 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.003801 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.143939 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.037879 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.030303 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.106061 | 0 |
| qsc_codepython_frac_lines_print | 0.045455 | 0 |

effective: 0, hits: 7
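The fractions expected in `test_reduced_row_echelon` above can be checked independently of the repository's own `LinearSystem`. A small cross-check with sympy (an assumption: sympy is not used by the project itself):

```python
# Independent cross-check (using sympy, not the repo's LinearSystem)
# of the final rref expectation in test_reduced_row_echelon above.
from sympy import Matrix, Rational

# Augmented matrix for: y + z = 1, x - y + z = 2, x + 2y - 5z = 3
aug = Matrix([[0, 1, 1, 1],
              [1, -1, 1, 2],
              [1, 2, -5, 3]])
reduced, _ = aug.rref()
assert reduced[:, 3] == Matrix([Rational(23, 9), Rational(7, 9), Rational(2, 9)])
print(reduced)
```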
**Row 3: `portfolio/views.py` (vineethcode33/starterPortfolio)**

| Field | Value |
|---|---|
| hexsha | eed6dd12fbe9041a950c2ee17b774d71003d1515 |
| size | 161 |
| ext | py |
| lang | Python |
| max_stars_repo_path | portfolio/views.py |
| max_stars_repo_name | vineethcode33/starterPortfolio |
| max_stars_repo_head_hexsha | 5935d624f7b0fec63d4d00e64175c25a4e872c61 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | portfolio/views.py |
| max_issues_repo_name | vineethcode33/starterPortfolio |
| max_issues_repo_head_hexsha | 5935d624f7b0fec63d4d00e64175c25a4e872c61 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | portfolio/views.py |
| max_forks_repo_name | vineethcode33/starterPortfolio |
| max_forks_repo_head_hexsha | 5935d624f7b0fec63d4d00e64175c25a4e872c61 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

`content`:

```python
from django.http import HttpResponse
from django.shortcuts import render_to_response
def home(request):
return render_to_response('portfolio/index.html')
```

avg_line_length: 20.125, max_line_length: 53, alphanum_fraction: 0.813665

| Signal | Value (`*_quality_signal`) | Flag (unsuffixed) |
|---|---|---|
| qsc_code_num_words | 22 | 1 |
| qsc_code_num_chars | 161 | 0 |
| qsc_code_mean_word_length | 5.772727 | 0 |
| qsc_code_frac_words_unique | 0.727273 | null |
| qsc_code_frac_chars_top_2grams | 0.15748 | 0 |
| qsc_code_frac_chars_top_3grams | 0.251969 | 1 |
| qsc_code_frac_chars_top_4grams | 0 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0 | 0 |
| qsc_code_frac_chars_whitespace | 0.118012 | 0 |
| qsc_code_size_file_byte | 161 | 0 |
| qsc_code_num_lines | 7 | 1 |
| qsc_code_num_chars_line_max | 54 | 0 |
| qsc_code_num_chars_line_mean | 23 | 0 |
| qsc_code_frac_chars_alphabet | 0.894366 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.125 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.25 | 1 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.5 | 1 |
| qsc_codepython_frac_lines_simplefunc | 0.25 | 1 |
| qsc_codepython_score_lines_no_logic | 1 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |

effective: 0, hits: 7
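One point worth noting about the file above: `render_to_response()` was deprecated in Django 2.0 and removed in Django 3.0. A hypothetical modernisation of the same view:

```python
# Hypothetical update of the view above: render() replaces the removed
# render_to_response() and takes the request object explicitly.
from django.shortcuts import render

def home(request):
    return render(request, 'portfolio/index.html')
```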
**Row 4: `test.py` (skypc785308/Ecpay_Invoice)**

| Field | Value |
|---|---|
| hexsha | eefeb8c4750839e2398b37bfb6019c46da83fa3e |
| size | 14,841 |
| ext | py |
| lang | Python |
| max_stars_repo_path | test.py |
| max_stars_repo_name | skypc785308/Ecpay_Invoice |
| max_stars_repo_head_hexsha | 1e0a8a80d8ce1078a78f64f8688f517d1bcc89fe |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2019-09-11T22:50:06.000Z |
| max_stars_repo_stars_event_max_datetime | 2019-09-11T22:50:06.000Z |
| max_issues_repo_path | test.py |
| max_issues_repo_name | skypc785308/Ecpay_Invoice |
| max_issues_repo_head_hexsha | 1e0a8a80d8ce1078a78f64f8688f517d1bcc89fe |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 1 |
| max_issues_repo_issues_event_min_datetime | 2019-07-03T09:44:49.000Z |
| max_issues_repo_issues_event_max_datetime | 2019-07-03T11:57:18.000Z |
| max_forks_repo_path | test.py |
| max_forks_repo_name | skypc785308/Ecpay_Invoice |
| max_forks_repo_head_hexsha | 1e0a8a80d8ce1078a78f64f8688f517d1bcc89fe |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2019-09-11T22:50:26.000Z |
| max_forks_repo_forks_event_max_datetime | 2019-09-11T22:50:26.000Z |

`content` (Python 2 source; comments in the file are Traditional Chinese, e.g. "寫入基本介接參數" = "set the basic integration parameters", "寫入發票相關資訊" = "set the invoice details", "送出" = "submit"):

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ecpay_invoice.ecpay_main import *
import time
import random
def test_normal_invoice():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/Issue'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
# 商品資訊
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱一',
'ItemCount': 10,
'ItemWord': '批',
'ItemPrice': 0.5,
'ItemTaxType': '1',
'ItemAmount': 5,
'ItemRemark': '商品備註一'
})
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱二',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 150,
'ItemTaxType': '1',
'ItemAmount': 150,
'ItemRemark': '商品備註二'
})
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱三',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 200,
'ItemTaxType': '1',
'ItemAmount': 200,
'ItemRemark': '商品備註三'
})
RelateNumber = 'ECPAY' + time.strftime("%Y%m%d%H%M%S", time.localtime()) + str(
random.randint(1000000000, 2147483647)) # 產生測試用自訂訂單編號
ecpay_invoice.Send['RelateNumber'] = RelateNumber
ecpay_invoice.Send['CustomerID'] = ''
ecpay_invoice.Send['CustomerIdentifier'] = ''
ecpay_invoice.Send['CustomerName'] = ''
ecpay_invoice.Send['CustomerAddr'] = ''
ecpay_invoice.Send['CustomerPhone'] = ''
ecpay_invoice.Send['CustomerEmail'] = 'test@local.com'
ecpay_invoice.Send['ClearanceMark'] = ''
ecpay_invoice.Send['Print'] = '0'
ecpay_invoice.Send['Donation'] = '0'
ecpay_invoice.Send['LoveCode'] = ''
ecpay_invoice.Send['CarruerType'] = ''
ecpay_invoice.Send['CarruerNum'] = ''
ecpay_invoice.Send['TaxType'] = '1'
ecpay_invoice.Send['SalesAmount'] = 355
ecpay_invoice.Send['InvoiceRemark'] = 'SDK TEST Python V1.0.5'
ecpay_invoice.Send['InvType'] = '07'
ecpay_invoice.Send['vat'] = ''
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print 'RelateNumber:' + str(RelateNumber)
print aReturn_Info
print aReturn_Info['RtnMsg']
print '發票號碼:' + aReturn_Info['InvoiceNumber']
assert aReturn_Info['RtnMsg'] == '開立發票成功'
def test_offline_normal_invoice():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/Issue'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
# 商品資訊
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱一',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 100,
'ItemTaxType': '1',
'ItemAmount': 100,
'ItemRemark': '商品備註一'
})
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱二',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 150,
'ItemTaxType': '1',
'ItemAmount': 150,
'ItemRemark': '商品備註二'
})
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱三',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 250,
'ItemTaxType': '1',
'ItemAmount': 250,
'ItemRemark': '商品備註三'
})
RelateNumber = 'ECPAY' + time.strftime("%Y%m%d%H%M%S", time.localtime()) + str(random.randint(1000000000, 2147483647)) # 產生測試用自訂訂單編號
ecpay_invoice.Send['RelateNumber'] = RelateNumber
ecpay_invoice.Send['CustomerID'] = ''
ecpay_invoice.Send['CustomerIdentifier'] = ''
ecpay_invoice.Send['CustomerName'] = 'test'
ecpay_invoice.Send['CustomerAddr'] = 'test'
ecpay_invoice.Send['CustomerPhone'] = ''
ecpay_invoice.Send['CustomerEmail'] = 'test@abc.com'
ecpay_invoice.Send['ClearanceMark'] = ''
ecpay_invoice.Send['Print'] = '0'
ecpay_invoice.Send['Donation'] = '0'
ecpay_invoice.Send['LoveCode'] = ''
ecpay_invoice.Send['CarruerType'] = ''
ecpay_invoice.Send['CarruerNum'] = ''
ecpay_invoice.Send['TaxType'] = '1'
ecpay_invoice.Send['SalesAmount'] = 500
ecpay_invoice.Send['InvoiceRemark'] = 'SDK TEST Python V1.0.5'
ecpay_invoice.Send['InvType'] = '07'
ecpay_invoice.Send['vat'] = ''
ecpay_invoice.Send['OnLine'] = False
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print 'RelateNumber:' + str(RelateNumber)
print aReturn_Info
print aReturn_Info['RtnMsg']
print '發票號碼' + aReturn_Info['InvoiceNumber']
assert aReturn_Info['RtnMsg'] == '開立發票成功'
def test_delay_invoice():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE_DELAY'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/DelayIssue'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
# 商品資訊
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱一',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 100,
'ItemTaxType': '1',
'ItemAmount': 100,
'ItemRemark': '商品備註一'
})
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱二',
'ItemCount': 2,
'ItemWord': '件',
'ItemPrice': 200,
'ItemTaxType': '1',
'ItemAmount': 400,
'ItemRemark': '商品備註二'
})
RelateNumber = 'ECPAY' + time.strftime("%Y%m%d%H%M%S", time.localtime()) + str(random.randint(1000000000, 2147483647)) # 產生測試用自訂訂單編號
ecpay_invoice.Send['RelateNumber'] = RelateNumber
ecpay_invoice.Send['CustomerID'] = ''
ecpay_invoice.Send['CustomerIdentifier'] = ''
ecpay_invoice.Send['CustomerName'] = ''
ecpay_invoice.Send['CustomerAddr'] = ''
ecpay_invoice.Send['CustomerPhone'] = ''
ecpay_invoice.Send['CustomerEmail'] = 'test@localhost.com'
ecpay_invoice.Send['ClearanceMark'] = ''
ecpay_invoice.Send['Print'] = '0'
ecpay_invoice.Send['Donation'] = '0'
ecpay_invoice.Send['LoveCode'] = ''
ecpay_invoice.Send['CarruerType'] = ''
ecpay_invoice.Send['CarruerNum'] = ''
ecpay_invoice.Send['TaxType'] = '1'
ecpay_invoice.Send['SalesAmount'] = 500
ecpay_invoice.Send['InvoiceRemark'] = 'SDK TEST Python V1.0.5'
ecpay_invoice.Send['InvType'] = '07'
ecpay_invoice.Send['DelayFlag'] = '1'
ecpay_invoice.Send['DelayDay'] = '1'
ecpay_invoice.Send['ECBankID'] = ''
ecpay_invoice.Send['Tsr'] = RelateNumber
ecpay_invoice.Send['PayType'] = '2'
ecpay_invoice.Send['PayAct'] = 'ALLPAY'
ecpay_invoice.Send['NotifyURL'] = ''
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info['OrderNumber']
print 'RelateNumber:' + str(RelateNumber)
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '開立延遲發票成功'
def test_allowance():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'ALLOWANCE'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/Allowance'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
# 商品資訊
ecpay_invoice.Send['Items'].append({
'ItemName': '商品名稱一',
'ItemCount': 1,
'ItemWord': '批',
'ItemPrice': 100,
'ItemTaxType': 1,
'ItemAmount': 100,
'ItemRemark': '商品備註一'
})
RelateNumber = 'ECPAY' + time.strftime("%Y%m%d%H%M%S", time.localtime()) +\
str(random.randint(1000000000, 2147483647)) # 產生測試用自訂訂單編號
ecpay_invoice.Send['CustomerName'] = ''
ecpay_invoice.Send['InvoiceNo'] = 'FY10004005'
ecpay_invoice.Send['AllowanceNotify'] = 'E'
ecpay_invoice.Send['NotifyMail'] = 'test@localhost.com'
ecpay_invoice.Send['NotifyPhone'] = ''
ecpay_invoice.Send['AllowanceAmount'] = 100
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print 'RelateNumber:' + str(RelateNumber)
print aReturn_Info
print aReturn_Info['RtnMsg']
print '折讓編號:'+ aReturn_Info['IA_Allow_No']
assert aReturn_Info['RtnMsg'] == '成功.'
def test_invoice_void():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE_VOID'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/IssueInvalid'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['InvoiceNumber'] = 'FY10004205'
ecpay_invoice.Send['Reason'] = 'ISSUE INVALID TEST'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '作廢發票成功'
def test_allowancs_void():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'ALLOWANCE_VOID'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/AllowanceInvalid'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['InvoiceNo'] = 'FY10004005'
ecpay_invoice.Send['Reason'] = '錯開'
ecpay_invoice.Send['AllowanceNo'] = '2018071615286810'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '成功.'
def test_qissue_invoice():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE_SEARCH'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Query/Issue'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['RelateNumber'] = 'SocialOrder320180716015434'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '查詢發票成功'
def test_qissue_void():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE_VOID_SEARCH'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Query/IssueInvalid'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['RelateNumber'] = 'ECPAY201807161524431519428107'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '查詢作廢發票成功'
def test_qallowance():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'ALLOWANCE_SEARCH'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Query/Allowance'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['InvoiceNo'] = 'FY10004005'
ecpay_invoice.Send['AllowanceNo'] = '2018071615286810'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '成功.'
def test_qallowance_void():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'ALLOWANCE_VOID_SEARCH'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Query/AllowanceInvalid'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['InvoiceNo'] = 'FY10004005'
ecpay_invoice.Send['AllowanceNo'] = '2018071615286810'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '成功.'
def test_invoice_notify():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE_NOTIFY'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Notify/InvoiceNotify'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['InvoiceNo'] = 'FY10004004' # 發票號碼
ecpay_invoice.Send['NotifyMail'] = 'skypc785308@gmal.com' # 發送電子信箱
ecpay_invoice.Send['Notify'] = 'E' # 發送方式
ecpay_invoice.Send['InvoiceTag'] = 'I' # 發送內容類型
ecpay_invoice.Send['Notified'] = 'C'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '成功.'
def test_invoice_trigger_issue():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'INVOICE_TRIGGER'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Invoice/TriggerIssue'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['Tsr'] = 'ECPAY201807161533501566047166'
ecpay_invoice.Send['PayType'] = '2'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '延後開立成功'
def test_check_mobile_barcode():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'CHECK_MOBILE_BARCODE'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Query/CheckMobileBarCode'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['BarCode'] = '/RXNOFER'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '執行成功'
def test_check_love_code():
ecpay_invoice = EcpayInvoice()
# 2.寫入基本介接參數
ecpay_invoice.Invoice_Method = 'CHECK_LOVE_CODE'
ecpay_invoice.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw/Query/CheckLoveCode'
ecpay_invoice.MerchantID = '2000132'
ecpay_invoice.HashKey = 'ejCk326UnaZWKisg'
ecpay_invoice.HashIV = 'q9jcZX8Ib9LM8wYk'
# 3.寫入發票相關資訊
ecpay_invoice.Send['LoveCode'] = '51919'
# 4. 送出
aReturn_Info = ecpay_invoice.Check_Out()
print aReturn_Info
print aReturn_Info['RtnMsg']
assert aReturn_Info['RtnMsg'] == '執行成功'
```

avg_line_length: 31.643923, max_line_length: 137, alphanum_fraction: 0.665656

| Signal | Value (`*_quality_signal`) | Flag (unsuffixed) |
|---|---|---|
| qsc_code_num_words | 1,591 | 0 |
| qsc_code_num_chars | 14,841 | 0 |
| qsc_code_mean_word_length | 5.993086 | 0 |
| qsc_code_frac_words_unique | 0.116279 | null |
| qsc_code_frac_chars_top_2grams | 0.245412 | 1 |
| qsc_code_frac_chars_top_3grams | 0.161091 | 0 |
| qsc_code_frac_chars_top_4grams | 0.036707 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.884006 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.867331 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.863031 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.858416 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.844258 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.837756 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.050063 | 0 |
| qsc_code_frac_chars_whitespace | 0.193787 | 0 |
| qsc_code_size_file_byte | 14,841 | 0 |
| qsc_code_num_lines | 469 | 0 |
| qsc_code_num_chars_line_max | 138 | 0 |
| qsc_code_num_chars_line_mean | 31.643923 | 0 |
| qsc_code_frac_chars_alphabet | 0.746845 | 0 |
| qsc_code_frac_chars_comments | 0.035442 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.721893 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.275316 | 0 |
| qsc_code_frac_chars_long_word_length | 0.007363 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.04142 | 0 |
| qsc_codepython_cate_ast | 0 | 1 |
| qsc_codepython_frac_lines_func_ratio | null | 0 |
| qsc_codepython_cate_var_zero | null | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.008876 | 0 |
| qsc_codepython_frac_lines_simplefunc | null | 0 |
| qsc_codepython_score_lines_no_logic | null | 0 |
| qsc_codepython_frac_lines_print | 0.10355 | 0 |

effective: 0, hits: 9
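Every test above repeats the same stage-gateway wiring (MerchantID 2000132 with the stage-test HashKey/HashIV). A hypothetical helper factoring that repetition out; the function is not part of the repository:

```python
# Hypothetical helper condensing the setup block repeated in each test
# above; names mirror the test file, the helper itself is an addition.
from ecpay_invoice.ecpay_main import EcpayInvoice

def make_stage_invoice(method, endpoint):
    inv = EcpayInvoice()
    inv.Invoice_Method = method
    inv.Invoice_Url = 'https://einvoice-stage.ecpay.com.tw' + endpoint
    inv.MerchantID = '2000132'          # stage-test merchant from the file
    inv.HashKey = 'ejCk326UnaZWKisg'    # stage-test credentials from the file
    inv.HashIV = 'q9jcZX8Ib9LM8wYk'
    return inv

# e.g. the void-invoice test setup collapses to:
invoice = make_stage_invoice('INVOICE_VOID', '/Invoice/IssueInvalid')
```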
**Row 5: `data/__init__.py` (CSUBioGroup/DeepGOA)**

| Field | Value |
|---|---|
| hexsha | 015b6f2470932ed9b611e2649658a5d64d228809 |
| size | 120 |
| ext | py |
| lang | Python |
| max_stars_repo_path | data/__init__.py |
| max_stars_repo_name | CSUBioGroup/DeepGOA |
| max_stars_repo_head_hexsha | 09802b14ca4f8be6e8d4c2a72e08dbf7876c3b30 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 3 |
| max_stars_repo_stars_event_min_datetime | 2020-09-09T15:57:28.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-12-22T04:39:06.000Z |
| max_issues_repo_path | data/__init__.py |
| max_issues_repo_name | CSUBioGroup/DeepGOA |
| max_issues_repo_head_hexsha | 09802b14ca4f8be6e8d4c2a72e08dbf7876c3b30 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 2 |
| max_issues_repo_issues_event_min_datetime | 2020-09-07T16:13:41.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-07-09T06:13:35.000Z |
| max_forks_repo_path | data/__init__.py |
| max_forks_repo_name | CSUBioGroup/DeepGOA |
| max_forks_repo_head_hexsha | 09802b14ca4f8be6e8d4c2a72e08dbf7876c3b30 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

`content`:

```python
from .data_generator import dataSet_V1
from .data_generator import dataSet_V2
from .data_generator import dataSet_V3
```

avg_line_length: 30, max_line_length: 39, alphanum_fraction: 0.85

| Signal | Value (`*_quality_signal`) | Flag (unsuffixed) |
|---|---|---|
| qsc_code_num_words | 18 | 1 |
| qsc_code_num_chars | 120 | 0 |
| qsc_code_mean_word_length | 5.333333 | 0 |
| qsc_code_frac_words_unique | 0.444444 | null |
| qsc_code_frac_chars_top_2grams | 0.25 | 1 |
| qsc_code_frac_chars_top_3grams | 0.53125 | 1 |
| qsc_code_frac_chars_top_4grams | 0.71875 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.9375 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0 | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.028571 | 0 |
| qsc_code_frac_chars_whitespace | 0.125 | 0 |
| qsc_code_size_file_byte | 120 | 0 |
| qsc_code_num_lines | 3 | 1 |
| qsc_code_num_chars_line_max | 40 | 0 |
| qsc_code_num_chars_line_mean | 40 | 0 |
| qsc_code_frac_chars_alphabet | 0.885714 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | true | 1 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 1 | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 1 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |

effective: 0, hits: 9
**Row 6: `backend/test/analyser/common/test_measurementcontroller.py` (3ll3d00d/vibe)**

| Field | Value |
|---|---|
| hexsha | 6d6fbfa42a61d0447377cfb417c9e8cb8fa2004b |
| size | 21,097 |
| ext | py |
| lang | Python |
| max_stars_repo_path | backend/test/analyser/common/test_measurementcontroller.py |
| max_stars_repo_name | 3ll3d00d/vibe |
| max_stars_repo_head_hexsha | 124b029f13ac746723e92cb47e9cb56edd2e54b5 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 4 |
| max_stars_repo_stars_event_min_datetime | 2018-05-21T20:32:34.000Z |
| max_stars_repo_stars_event_max_datetime | 2019-11-20T10:09:17.000Z |
| max_issues_repo_path | backend/test/analyser/common/test_measurementcontroller.py |
| max_issues_repo_name | 3ll3d00d/vibe |
| max_issues_repo_head_hexsha | 124b029f13ac746723e92cb47e9cb56edd2e54b5 |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | 31 |
| max_issues_repo_issues_event_min_datetime | 2017-01-19T22:20:10.000Z |
| max_issues_repo_issues_event_max_datetime | 2019-04-14T12:00:37.000Z |
| max_forks_repo_path | backend/test/analyser/common/test_measurementcontroller.py |
| max_forks_repo_name | 3ll3d00d/vibe |
| max_forks_repo_head_hexsha | 124b029f13ac746723e92cb47e9cb56edd2e54b5 |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 2 |
| max_forks_repo_forks_event_min_datetime | 2019-06-30T00:22:22.000Z |
| max_forks_repo_forks_event_max_datetime | 2019-10-08T11:38:59.000Z |

`content`:

```python
import csv
import datetime
import json
import os
import shutil
from time import sleep
from unittest.mock import MagicMock
import pytest
from analyser.common.devicecontroller import DeviceController
from analyser.common.measurementcontroller import MeasurementController, MeasurementStatus, MEASUREMENT_TIMES_CLASH, \
RecordStatus, CompleteMeasurement, getMeasurementId
from analyser.common.targetstatecontroller import TargetState, TargetStateProvider
from core.httpclient import RecordingHttpClient
from core.interface import RecordingDeviceStatus, DATETIME_FORMAT
DEVICE_MAX_AGE_SECONDS = 20
TIME_TIL_DEATHBED = 3
def cleanUpTmpDir(tmpdir):
try:
shutil.rmtree(str(tmpdir))
except:
import sys
print(sys.exc_info())
@pytest.fixture
def httpclient():
return RecordingHttpClient()
@pytest.fixture
def targetStateProvider():
return TargetStateProvider(TargetState())
@pytest.fixture
def targetStateController():
mm = MagicMock()
return mm
@pytest.fixture
def deviceController(tmpdirPath, targetStateController, httpclient):
controller = DeviceController(targetStateController, tmpdirPath, httpclient, maxAgeSeconds=DEVICE_MAX_AGE_SECONDS)
yield controller
controller.shutdown()
@pytest.fixture
def measurementController(tmpdirPath, targetStateProvider, deviceController):
controller = MeasurementController(targetStateProvider, tmpdirPath, deviceController,
maxTimeTilDeathbedSeconds=TIME_TIL_DEATHBED,
maxTimeOnDeathbedSeconds=TIME_TIL_DEATHBED)
yield controller
controller.shutdown()
def verifyNothingOnDisk(tmpdirPath, name):
assert not os.path.exists(os.path.join(tmpdirPath, 'name'))
def test_scheduledMeasurementWithNoDevice_fails(measurementController, tmpdirPath):
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
startTime = datetime.datetime.utcnow()
accepted, message = measurementController.schedule('first', 0.2, startTime, 'desc')
assert accepted
assert message is None
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 1
assert am[0].name == 'first'
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.SCHEDULED
# TODO should actually be failed because we've given it no devices
# wait for it to be swept away
sleep(2)
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
am = measurementController.getMeasurements(MeasurementStatus.FAILED)
assert am is not None
assert len(am) == 1
assert am[0].name == 'first'
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.FAILED
verifyNothingOnDisk(tmpdirPath, 'first')
def test_clashingMeasurement_isRejected(measurementController, tmpdirPath):
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
startTime = datetime.datetime.utcnow()
accepted, message = measurementController.schedule('first', 0.2, startTime, 'desc')
assert accepted
assert message is None
accepted, message = measurementController.schedule('second', 0.2, startTime + datetime.timedelta(seconds=0.1),
'desc')
assert not accepted
assert message == MEASUREMENT_TIMES_CLASH
verifyNothingOnDisk(tmpdirPath, 'first')
verifyNothingOnDisk(tmpdirPath, 'second')
def test_scheduledMeasurement_IsSentToDevice(measurementController, deviceController, tmpdirPath):
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
startTime = datetime.datetime.utcnow()
accepted, message = measurementController.schedule('first', 0.2, startTime, 'desc')
assert accepted
assert message is None
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 1
assert am[0].name == 'first'
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.SCHEDULED
# TODO should actually be failed because we've given it no devices
# wait for it to be swept away
sleep(1.5)
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
am = measurementController.getMeasurements(MeasurementStatus.FAILED)
assert am is not None
assert len(am) == 1
assert am[0].name == 'first'
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.FAILED
verifyNothingOnDisk(tmpdirPath, 'first')
def test_scheduledMeasurementThatReceivesData_CompletesNormally(measurementController, deviceController, httpclient,
tmpdirPath):
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
device = {'status': RecordingDeviceStatus.INITIALISED.name, 'serviceURL': 'hello'}
device.update(targetStateAsDict())
deviceController.accept('d1', device)
devices = deviceController.getDevices(RecordingDeviceStatus.INITIALISED.name)
assert len(devices) == 1
assert devices[0] is not None
assert devices[0].deviceId is not None
assert devices[0].deviceId == 'd1'
startTime = datetime.datetime.utcnow()
measurementName = 'first'
measurementId = getMeasurementId(startTime, measurementName)
accepted, message = measurementController.schedule(measurementName, 0.2, startTime, 'desc')
assert accepted
assert message is None
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 1
assert am[0].id == measurementId
assert am[0].name == measurementName
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.SCHEDULED
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.SCHEDULED.name
# start the measurement & verify the device states update
assert measurementController.startMeasurement(measurementId, 'd1')
am = measurementController.getMeasurements(MeasurementStatus.RECORDING)
assert am is not None
assert len(am) == 1
assert am[0].id == measurementId
assert am[0].name == measurementName
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.RECORDING
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.RECORDING.name
# verify that some other device is rejected
assert not measurementController.startMeasurement(measurementId, 'd2')
# now send some data and assert it's all accepted
data1 = [0, 1, 1, 1]
data2 = [1, 2, 2, 2]
data3 = [2, 3, 3, 3]
assert measurementController.recordData(measurementId, 'd1', [data1])
assert measurementController.recordData(measurementId, 'd1', [data2])
assert measurementController.recordData(measurementId, 'd1', [data3])
# verify that data from some other device is rejected
assert not measurementController.startMeasurement(measurementId, 'd2')
# complete the measurement and assert the states update
assert measurementController.completeMeasurement(measurementId, 'd1')
am = measurementController.getMeasurements(MeasurementStatus.RECORDING)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.RECORDING
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.COMPLETE.name
# wait for it to be swept into the completed set
sleep(1.5)
am = measurementController.getMeasurements(MeasurementStatus.COMPLETE)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == getFormattedStartTime(startTime)
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.COMPLETE
# check the data is on the disk
dataFile = os.path.join(tmpdirPath, am[0].idAsPath, 'd1', 'data.out')
data = []
with open(dataFile, newline='') as csvfile:
dr = csv.reader(csvfile)
for row in dr:
data.append(row)
assert len(data) == 3
assert data[0] == [str(i) for i in data1]
assert data[1] == [str(i) for i in data2]
assert data[2] == [str(i) for i in data3]
metapath = os.path.join(tmpdirPath, am[0].idAsPath, 'metadata.json')
assert os.path.isfile(metapath)
with open(metapath) as jsonfile:
metadata = json.load(jsonfile)
assert metadata is not None
cm = CompleteMeasurement(metadata, tmpdirPath)
assert cm is not None
assert cm.id == measurementId
assert cm.name == measurementName
assert cm.startTime == getFormattedStartTime(startTime)
assert cm.duration == 0.2
assert cm.description == 'desc'
assert cm.measurementParameters == targetStateAsDict(False)
assert len(cm.recordingDevices) == 1
assert cm.recordingDevices.get('d1') != None
assert cm.recordingDevices.get('d1').get('state') == MeasurementStatus.COMPLETE.name
assert cm.recordingDevices.get('d1').get('reason') == None
def getFormattedStartTime(time=datetime.datetime.utcnow()):
return datetime.datetime.strptime(time.strftime(DATETIME_FORMAT), DATETIME_FORMAT)
def targetStateAsDict(includeSamplesPerBatch=True):
targetState = TargetState()
val = {
'fs': targetState.fs,
'accelerometerSens': targetState.accelerometerSens,
'accelerometerEnabled': targetState.accelerometerEnabled,
'gyroSens': targetState.gyroSens,
'gyroEnabled': targetState.gyroEnabled,
}
if includeSamplesPerBatch:
val.update({'samplesPerBatch': targetState.samplesPerBatch})
return val
def test_completedMeasurementsAreReloaded():
# TODO complete a measurement, confirm it is on disk, reload, check it is loaded
pass
def test_scheduledMeasurement_IsPutOnDeathbed_BeforeFailure(measurementController, deviceController, tmpdirPath):
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
device = {'status': RecordingDeviceStatus.INITIALISED.name, 'serviceURL': 'hello'}
device.update(targetStateAsDict())
deviceController.accept('d1', device)
devices = deviceController.getDevices(RecordingDeviceStatus.INITIALISED.name)
assert len(devices) == 1
assert devices[0] is not None
assert devices[0].deviceId is not None
assert devices[0].deviceId == 'd1'
startTime = datetime.datetime.utcnow() + datetime.timedelta(seconds=0.5)
measurementName = 'first'
measurementId = getMeasurementId(startTime, measurementName)
accepted, message = measurementController.schedule(measurementName, 0.2, startTime, 'desc')
assert accepted
assert message is None
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.SCHEDULED
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.SCHEDULED.name
# start the measurement & verify the device states update
assert measurementController.startMeasurement(measurementId, 'd1')
am = measurementController.getMeasurements(MeasurementStatus.RECORDING)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.RECORDING
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.RECORDING.name
# sleep to push onto the deathbed
sleep(TIME_TIL_DEATHBED + 1)
# check it is on the deathbed
am = measurementController.getMeasurements(MeasurementStatus.DYING)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.DYING
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.RECORDING.name
# sleep til it dies
sleep(TIME_TIL_DEATHBED + 0.5)
am = measurementController.getMeasurements(MeasurementStatus.FAILED)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.FAILED
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.FAILED.name
# check we have the metadata but no data
dataFile = os.path.join(tmpdirPath, am[0].idAsPath, 'd1', 'data.out')
data = []
assert os.path.exists(dataFile)
with open(dataFile, newline='') as csvfile:
dr = csv.reader(csvfile)
for row in dr:
data.append(row)
assert len(data) == 0
metapath = os.path.join(tmpdirPath, am[0].idAsPath, 'metadata.json')
assert os.path.isfile(metapath)
with open(metapath) as jsonfile:
metadata = json.load(jsonfile)
assert metadata is not None
assert metadata['status'] == MeasurementStatus.FAILED.name
assert metadata['name'] == measurementName
assert metadata['startTime'] == startTime.strftime(DATETIME_FORMAT)
assert metadata['duration'] == 0.2
assert metadata['description'] == 'desc'
assert metadata['measurementParameters'] == targetStateAsDict(False)
assert len(metadata['recordingDevices']) == 1
assert metadata['recordingDevices'].get('d1') != None
assert metadata['recordingDevices']['d1'].get('state') == MeasurementStatus.FAILED.name
assert metadata['recordingDevices']['d1'].get('reason') == 'Evicting from deathbed'
def test_scheduledMeasurement_FailsDuringMeasurement_IsStoredAsFailed(measurementController, deviceController,
tmpdirPath):
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 0
device = {'status': RecordingDeviceStatus.INITIALISED.name, 'serviceURL': 'hello'}
device.update(targetStateAsDict())
deviceController.accept('d1', device)
devices = deviceController.getDevices(RecordingDeviceStatus.INITIALISED.name)
assert len(devices) == 1
assert devices[0] is not None
assert devices[0].deviceId is not None
assert devices[0].deviceId == 'd1'
startTime = datetime.datetime.utcnow()
measurementName = 'first'
measurementId = getMeasurementId(startTime, measurementName)
accepted, message = measurementController.schedule(measurementName, 0.2, startTime, 'desc')
assert accepted
assert message is None
am = measurementController.getMeasurements(MeasurementStatus.SCHEDULED)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.SCHEDULED
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.SCHEDULED.name
# start the measurement & verify the device states update
assert measurementController.startMeasurement(measurementId, 'd1')
am = measurementController.getMeasurements(MeasurementStatus.RECORDING)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.RECORDING
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.RECORDING.name
# now send some data and assert it's all accepted
data1 = [0, 1, 1, 1]
data2 = [1, 2, 2, 2]
data3 = [2, 3, 3, 3]
assert measurementController.recordData(measurementId, 'd1', [data1])
assert measurementController.recordData(measurementId, 'd1', [data2])
assert measurementController.recordData(measurementId, 'd1', [data3])
# fail the measurement
assert measurementController.failMeasurement(measurementId, 'd1', 'oh noes')
am = measurementController.getMeasurements(MeasurementStatus.RECORDING)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.RECORDING
assert am[0].recordingDevices.get('d1')
assert am[0].recordingDevices.get('d1')['state'] == RecordStatus.FAILED.name
assert am[0].recordingDevices.get('d1')['reason'] == 'oh noes'
# wait for it to be swept into the failed set
sleep(1.5)
am = measurementController.getMeasurements(MeasurementStatus.COMPLETE)
assert am is not None
assert len(am) == 0
am = measurementController.getMeasurements(MeasurementStatus.FAILED)
assert am is not None
assert len(am) == 1
assert am[0].name == measurementName
assert am[0].id == measurementId
assert am[0].startTime == startTime
assert am[0].duration == 0.2
assert am[0].description == 'desc'
assert am[0].status == MeasurementStatus.FAILED
# check data is still recorded but marked as FAILED
dataFile = os.path.join(tmpdirPath, am[0].idAsPath, 'd1', 'data.out')
data = []
with open(dataFile, newline='') as csvfile:
dr = csv.reader(csvfile)
for row in dr:
data.append(row)
assert len(data) == 3
assert data[0] == [str(i) for i in data1]
assert data[1] == [str(i) for i in data2]
assert data[2] == [str(i) for i in data3]
metapath = os.path.join(tmpdirPath, am[0].idAsPath, 'metadata.json')
assert os.path.isfile(metapath)
with open(metapath) as jsonfile:
metadata = json.load(jsonfile)
assert metadata is not None
assert metadata['status'] == MeasurementStatus.FAILED.name
assert metadata['name'] == measurementName
assert metadata['startTime'] == startTime.strftime(DATETIME_FORMAT)
assert metadata['duration'] == 0.2
assert metadata['description'] == 'desc'
assert metadata['measurementParameters'] == targetStateAsDict(False)
assert len(metadata['recordingDevices']) == 1
assert metadata['recordingDevices'].get('d1') != None
assert metadata['recordingDevices']['d1'].get('state') == MeasurementStatus.FAILED.name
assert metadata['recordingDevices']['d1'].get('reason') == 'oh noes'
# TODO
# check device counts are updated
# check files are copied
# check old is left in place
# confirm get measurements yields correct data
def rename_measurement_deletes_old(measurementController):
pass
def edit_description_is_an_inplace_edit(measurementController):
pass
def trim_left_is_a_copy(measurementController):
pass
def trim_right_is_a_copy(measurementController):
pass
def trim_both_is_a_copy(measurementController):
pass
def trim_and_rename_deletes_old(measurementController):
pass
def rename_device_is_an_inplace_edit(measurementController):
pass
```

avg_line_length: 39.433645, max_line_length: 118, alphanum_fraction: 0.703512

| Signal | Value (`*_quality_signal`) | Flag (unsuffixed) |
|---|---|---|
| qsc_code_num_words | 2,384 | 0 |
| qsc_code_num_chars | 21,097 | 0 |
| qsc_code_mean_word_length | 6.196309 | 0 |
| qsc_code_frac_words_unique | 0.100671 | null |
| qsc_code_frac_chars_top_2grams | 0.074736 | 0 |
| qsc_code_frac_chars_top_3grams | 0.068846 | 0 |
| qsc_code_frac_chars_top_4grams | 0.034525 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.763471 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0.752505 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.740726 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.73267 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.729285 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.729285 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.019371 | 0 |
| qsc_code_frac_chars_whitespace | 0.190074 | 0 |
| qsc_code_size_file_byte | 21,097 | 0 |
| qsc_code_num_lines | 534 | 0 |
| qsc_code_num_chars_line_max | 119 | 0 |
| qsc_code_num_chars_line_mean | 39.507491 | 0 |
| qsc_code_frac_chars_alphabet | 0.845145 | 0 |
| qsc_code_frac_chars_comments | 0.053041 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.774194 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.043411 | 0 |
| qsc_code_frac_chars_long_word_length | 0.002105 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0.001873 | 0 |
| qsc_code_frac_lines_assert | 0.576037 | 1 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.052995 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0.018433 | 0 |
| qsc_codepython_frac_lines_import | 0.032258 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0.006912 | 0 |
| qsc_codepython_score_lines_no_logic | 0.096774 | 0 |
| qsc_codepython_frac_lines_print | 0.002304 | 0 |

effective: 0, hits: 7
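The measurement tests above repeat the same lookup-and-assert block for every status change. A hypothetical pytest helper condensing that pattern (names mirror the test file; the helper itself is not in the repository):

```python
# Hypothetical helper for the repeated assertion block in the tests
# above: fetch measurements in a given status, check there is exactly
# one, and verify its identity fields.
def assert_single_measurement(controller, status, name, measurement_id):
    found = controller.getMeasurements(status)
    assert found is not None
    assert len(found) == 1
    assert found[0].name == name
    assert found[0].id == measurement_id
    assert found[0].status == status
    return found[0]  # callers can then assert on duration, devices, etc.
```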
**Row 7: `tests/bench_mark/bench_mark.py` (wphyojpl/incubator-sdap-in-situ-data-services)**

| Field | Value |
|---|---|
| hexsha | 6d7b4cbaca9042614a4bfe103aab59550042c4d4 |
| size | 87,161 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/bench_mark/bench_mark.py |
| max_stars_repo_name | wphyojpl/incubator-sdap-in-situ-data-services |
| max_stars_repo_head_hexsha | 20fe50e2ebe5b9b4d3b8e266be20ee5265aadab3 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tests/bench_mark/bench_mark.py |
| max_issues_repo_name | wphyojpl/incubator-sdap-in-situ-data-services |
| max_issues_repo_head_hexsha | 20fe50e2ebe5b9b4d3b8e266be20ee5265aadab3 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/bench_mark/bench_mark.py |
| max_forks_repo_name | wphyojpl/incubator-sdap-in-situ-data-services |
| max_forks_repo_head_hexsha | 20fe50e2ebe5b9b4d3b8e266be20ee5265aadab3 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

`content`:

```python
import json
import requests
from tests.bench_mark.func_exec_time_decorator import func_exec_time_decorator
class BenchMark:
def __init__(self):
self.__cdms_domain = 'http://localhost:30801/insitu'
# self.__cdms_domain = 'https://doms.jpl.nasa.gov/insitu'
# self.__cdms_domain = 'https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu'
self.__size = 100
self.__start_index = 0
self.__provider = 'NCAR'
self.__project = 'ICOADS Release 3.0'
self.__platform_code = '30,41,42'
self.__variable = 'relative_humidity'
self.__columns = 'air_temperature'
self.__start_time = '2017-01-01T00:00:00Z'
self.__end_time = '2017-03-30T00:00:00Z'
self.__min_depth = -99
self.__max_depth = 0
self.__min_lat_lon = (-111, 11)
self.__max_lat_lon = (111, 99)
@func_exec_time_decorator
def __execute_query(self):
"""
time curl 'https://doms.jpl.nasa.gov/insitu?startIndex=3&itemsPerPage=20&minDepth=-99&variable=relative_humidity&columns=air_temperature&maxDepth=-1&startTime=2019-02-14T00:00:00Z&endTime=2021-02-16T00:00:00Z&platform=3B&bbox=-111,11,111,99'
:return:
"""
# rest_keyword = 'query_data_doms_custom_pagination'
rest_keyword = 'query_data_doms'
print(f'{self.__cdms_domain}/1.0/{rest_keyword}?startIndex={self.__start_index}&itemsPerPage={self.__size}'
f'&provider={self.__provider}'
f'&project={self.__project}'
f'&platform={self.__platform_code}'
f'{"" if self.__variable is None else f"&variable={self.__variable}"}'
f'{"" if self.__columns is None else f"&columns={self.__columns}"}'
f'&minDepth={self.__min_depth}&maxDepth={self.__max_depth}'
f'&startTime={self.__start_time}&endTime={self.__end_time}'
f'&bbox={self.__min_lat_lon[0]},{self.__min_lat_lon[1]},{self.__max_lat_lon[0]},{self.__max_lat_lon[1]}')
response = requests.get(
url=f'{self.__cdms_domain}/1.0/{rest_keyword}?startIndex={self.__start_index}&itemsPerPage={self.__size}'
f'&provider={self.__provider}'
f'&project={self.__project}'
f'&platform={self.__platform_code}'
f'{"" if self.__variable is None else f"&variable={self.__variable}"}'
f'{"" if self.__columns is None else f"&columns={self.__columns}"}'
f'&minDepth={self.__min_depth}&maxDepth={self.__max_depth}'
f'&startTime={self.__start_time}&endTime={self.__end_time}'
f'&bbox={self.__min_lat_lon[0]},{self.__min_lat_lon[1]},{self.__max_lat_lon[0]},{self.__max_lat_lon[1]}', verify=False
)
if response.status_code > 400:
raise ValueError(f'wrong status code: {response.status_code}. details: {response.text}')
return json.loads(response.text)
@func_exec_time_decorator
def __execute_query_custom_pagination(self):
"""
time curl 'https://doms.jpl.nasa.gov/insitu?startIndex=3&itemsPerPage=20&minDepth=-99&variable=relative_humidity&columns=air_temperature&maxDepth=-1&startTime=2019-02-14T00:00:00Z&endTime=2021-02-16T00:00:00Z&platform=3B&bbox=-111,11,111,99'
:return:
"""
rest_keyword = 'query_data_doms_custom_pagination'
get_url = f'{self.__cdms_domain}/1.0/{rest_keyword}?startIndex={self.__start_index}&itemsPerPage={self.__size}' \
f'&provider={self.__provider}' \
f'&project={self.__project}' \
f'&platform={self.__platform_code}' \
f'{"" if self.__variable is None else f"&variable={self.__variable}"}' \
f'{"" if self.__columns is None else f"&columns={self.__columns}"}' \
f'&minDepth={self.__min_depth}&maxDepth={self.__max_depth}' \
f'&startTime={self.__start_time}&endTime={self.__end_time}' \
f'&bbox={self.__min_lat_lon[1]},{self.__min_lat_lon[0]},{self.__max_lat_lon[1]},{self.__max_lat_lon[0]}'
# rest_keyword = 'query_data_doms'
print(get_url)
response = requests.get(url=get_url, verify=False)
if response.status_code > 400:
raise ValueError(f'wrong status code: {response.status_code}. details: {response.text}')
return json.loads(response.text)
@func_exec_time_decorator
def __execute_blind_query(self, get_url):
"""
:return:
"""
print(get_url)
response = requests.get(url=get_url, verify=False)
if response.status_code > 400:
raise ValueError(f'wrong status code: {response.status_code}. details: {response.text}')
return json.loads(response.text)

    @func_exec_time_decorator
    def __execute_query_custom_pagination_paginate(self, markerTime, markerPlatform):
        """
        time curl 'https://doms.jpl.nasa.gov/insitu?startIndex=3&itemsPerPage=20&minDepth=-99&variable=relative_humidity&columns=air_temperature&maxDepth=-1&startTime=2019-02-14T00:00:00Z&endTime=2021-02-16T00:00:00Z&platform=3B&bbox=-111,11,111,99'
        :param markerTime: time marker of the last record from the previous page
        :param markerPlatform: platform marker of the last record from the previous page
        :return: parsed JSON response
        """
        rest_keyword = 'query_data_doms_custom_pagination'
        # rest_keyword = 'query_data_doms'
        get_url = f'{self.__cdms_domain}/1.0/{rest_keyword}?startIndex={self.__start_index}&itemsPerPage={self.__size}' \
                  f'&provider={self.__provider}' \
                  f'&markerTime={markerTime}' \
                  f'&markerPlatform={markerPlatform}' \
                  f'&project={self.__project}' \
                  f'&platform={self.__platform_code}' \
                  f'{"" if self.__variable is None else f"&variable={self.__variable}"}' \
                  f'{"" if self.__columns is None else f"&columns={self.__columns}"}' \
                  f'&minDepth={self.__min_depth}&maxDepth={self.__max_depth}' \
                  f'&startTime={self.__start_time}&endTime={self.__end_time}' \
                  f'&bbox={self.__min_lat_lon[0]},{self.__min_lat_lon[1]},{self.__max_lat_lon[0]},{self.__max_lat_lon[1]}'
        print(get_url)
        response = requests.get(url=get_url, verify=False)
        if response.status_code >= 400:
            raise ValueError(f'wrong status code: {response.status_code}. details: {response.text}')
        return json.loads(response.text)
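
    # Illustrative sketch only: a driver for the marker-based (cursor) endpoint above.
    # Instead of making the server skip startIndex records, which the logs below show
    # getting slower as the offset grows, the client re-sends the time and platform of
    # the last record it received and the server resumes from there. The 'results',
    # 'time', and 'platform_code' field names are assumptions about the payload, and the
    # timing decorator is assumed to pass the parsed JSON through unchanged.
    def __marker_pagination_sketch(self):
        marker_time, marker_platform = self.__start_time, ''
        total_retrieved = 0
        while True:
            response = self.__execute_query_custom_pagination_paginate(marker_time, marker_platform)
            results = response.get('results', [])  # assumed field name
            if not results:
                break
            total_retrieved += len(results)
            last = results[-1]
            marker_time, marker_platform = last['time'], last['platform_code']  # next cursor
        return total_retrieved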

    def pagination_bench_mark(self):
        """
        Benchmark log: paging through query_data_doms in 20000-record pages against a
        local deployment (localhost:30801) and an AWS ELB deployment.

Connected to pydev debugger (build 201.7223.92)
http://localhost:30801/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 0 -- total: 121010 -- current_count: 20000 -- duration: 21.127051
first_item: {'air_temperature': 4.5, 'relative_humidity': 78.6, 'time': '2017-01-12T00:00:00Z', 'depth': -99999.0, 'latitude': 61.6, 'longitude': 1.3}
http://localhost:30801/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 20000 -- total: 121010 -- current_count: 20000 -- duration: 22.362244
first_item: {'air_temperature': 23.8, 'relative_humidity': 77.8, 'time': '2017-01-25T16:49:48Z', 'depth': -99999.0, 'latitude': 25.9, 'longitude': -89.7}
http://localhost:30801/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 40000 -- total: 121010 -- current_count: 20000 -- duration: 22.788451
first_item: {'air_temperature': 8.4, 'relative_humidity': 94.7, 'time': '2017-01-07T06:49:48Z', 'depth': -99999.0, 'latitude': 29.3, 'longitude': -88.7}
http://localhost:30801/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 60000 -- total: 121010 -- current_count: 20000 -- duration: 39.308498
first_item: {'air_temperature': 10.7, 'relative_humidity': 61.9, 'time': '2017-01-05T15:00:00Z', 'depth': -99999.0, 'latitude': 57.8, 'longitude': -0.9}
http://localhost:30801/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 80000 -- total: 121010 -- current_count: 20000 -- duration: 28.825153
first_item: {'air_temperature': 16.9, 'relative_humidity': 62.9, 'time': '2017-01-19T12:10:12Z', 'depth': -99999.0, 'latitude': 33.4, 'longitude': -77.7}
http://localhost:30801/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 100000 -- total: 121010 -- current_count: 20000 -- duration: 32.178053
first_item: {'air_temperature': 3.4, 'relative_humidity': 89.9, 'time': '2017-01-07T22:00:00Z', 'depth': -99999.0, 'latitude': 33.4, 'longitude': -77.7}
http://localhost:30801/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 120000 -- total: 121010 -- current_count: 1010 -- duration: 21.210814
first_item: {'air_temperature': 5.8, 'relative_humidity': 78.8, 'time': '2017-01-13T00:00:00Z', 'depth': -99999.0, 'latitude': 61.3, 'longitude': 1.5}
http://localhost:30801/1.0/query_data_doms?startIndex=140000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2017-03-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2017-03-30T00:00:00Z -- start_index: 140000 -- total: 121010 -- current_count: 0 -- duration: 14.375344
Process finished with exit code 0
Connected to pydev debugger (build 201.7223.92)
http://localhost:30801/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 0 -- total: 2076034 -- current_count: 20000 -- duration: 124.080378
first_item: {'air_temperature': 24.5, 'relative_humidity': 73.7, 'time': '2017-09-30T12:30:00Z', 'depth': -99999.0, 'latitude': 33.4, 'longitude': -77.7}
http://localhost:30801/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 20000 -- total: 2076034 -- current_count: 20000 -- duration: 134.163414
first_item: {'air_temperature': 8.7, 'relative_humidity': 91.5, 'time': '2017-05-12T16:19:48Z', 'depth': -99999.0, 'latitude': 44.0, 'longitude': -86.6}
http://localhost:30801/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 40000 -- total: 2076034 -- current_count: 20000 -- duration: 170.192412
first_item: {'air_temperature': 27.1, 'relative_humidity': 76.9, 'time': '2017-08-25T13:19:48Z', 'depth': -99999.0, 'latitude': 28.9, 'longitude': -78.5}
http://localhost:30801/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 60000 -- total: 2076034 -- current_count: 20000 -- duration: 174.84866
first_item: {'air_temperature': 10.7, 'relative_humidity': 79.0, 'time': '2017-10-18T15:00:00Z', 'depth': -99999.0, 'latitude': 57.0, 'longitude': 1.8}
http://localhost:30801/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 80000 -- total: 2076034 -- current_count: 20000 -- duration: 174.773341
first_item: {'air_temperature': 22.3, 'relative_humidity': 69.2, 'time': '2017-04-17T00:40:12Z', 'depth': -99999.0, 'latitude': 33.4, 'longitude': -77.7}
http://localhost:30801/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 100000 -- total: 2076034 -- current_count: 20000 -- duration: 200.328648
first_item: {'air_temperature': 22.2, 'relative_humidity': 99.4, 'time': '2017-07-11T10:10:12Z', 'depth': -99999.0, 'latitude': 41.8, 'longitude': -87.0}
http://localhost:30801/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 120000 -- total: 2076034 -- current_count: 20000 -- duration: 196.793639
first_item: {'air_temperature': 26.3, 'relative_humidity': 80.6, 'time': '2017-05-09T03:30:00Z', 'depth': -99999.0, 'latitude': 21.1, 'longitude': -64.9}
http://localhost:30801/1.0/query_data_doms?startIndex=140000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 140000 -- total: 2076034 -- current_count: 20000 -- duration: 225.118882
first_item: {'air_temperature': 28.2, 'relative_humidity': 71.2, 'time': '2017-04-26T15:19:48Z', 'depth': -99999.0, 'latitude': 18.4, 'longitude': -69.6}
http://localhost:30801/1.0/query_data_doms?startIndex=160000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 160000 -- total: 2076034 -- current_count: 20000 -- duration: 216.740009
first_item: {'air_temperature': 18.1, 'relative_humidity': 48.6, 'time': '2017-11-20T00:00:00Z', 'depth': -99999.0, 'latitude': 32.5, 'longitude': -79.1}
http://localhost:30801/1.0/query_data_doms?startIndex=180000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 180000 -- total: 2076034 -- current_count: 20000 -- duration: 235.660017
first_item: {'air_temperature': 22.8, 'relative_humidity': 79.1, 'time': '2017-08-15T05:00:00Z', 'depth': -99999.0, 'latitude': 41.8, 'longitude': -87.0}
http://localhost:30801/1.0/query_data_doms?startIndex=200000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 200000 -- total: 2076034 -- current_count: 20000 -- duration: 249.714485
first_item: {'air_temperature': 16.9, 'relative_humidity': 90.3, 'time': '2017-05-29T09:10:12Z', 'depth': -99999.0, 'latitude': 41.6, 'longitude': -81.8}
http://localhost:30801/1.0/query_data_doms?startIndex=220000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 220000 -- total: 2076034 -- current_count: 20000 -- duration: 253.446502
first_item: {'air_temperature': 19.3, 'relative_humidity': 62.6, 'time': '2017-04-27T16:49:48Z', 'depth': -99999.0, 'latitude': 41.6, 'longitude': -81.8}
http://localhost:30801/1.0/query_data_doms?startIndex=240000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 240000 -- total: 2076034 -- current_count: 20000 -- duration: 270.454133
first_item: {'air_temperature': 14.7, 'relative_humidity': 68.6, 'time': '2017-08-30T12:00:00Z', 'depth': -99999.0, 'latitude': 56.4, 'longitude': 2.1}
http://localhost:30801/1.0/query_data_doms?startIndex=260000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-01-01T00:00:00Z - 2018-01-30T00:00:00Z -- start_index: 260000 -- total: 2076034 -- current_count: 20000 -- duration: 269.728347
first_item: {'air_temperature': 8.3, 'relative_humidity': 90.3, 'time': '2017-01-07T12:00:00Z', 'depth': -99999.0, 'latitude': 55.0, 'longitude': 6.4}
http://localhost:30801/1.0/query_data_doms?startIndex=280000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-01-01T00:00:00Z&endTime=2018-01-30T00:00:00Z&bbox=-111,11,111,99
Connected to pydev debugger (build 201.7223.92)
http://localhost:30801/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 0 -- total: 168250 -- current_count: 20000 -- duration: 17.341993
first_item: {'air_temperature': 7.6, 'relative_humidity': 91.5, 'time': '2017-04-20T12:00:00Z', 'depth': -99999.0, 'latitude': 57.7, 'longitude': 1.8}
http://localhost:30801/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 20000 -- total: 168250 -- current_count: 20000 -- duration: 21.21373
first_item: {'air_temperature': 23.2, 'relative_humidity': 64.3, 'time': '2017-04-16T19:19:48Z', 'depth': -99999.0, 'latitude': 23.8, 'longitude': -68.4}
http://localhost:30801/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 40000 -- total: 168250 -- current_count: 20000 -- duration: 20.055859
first_item: {'air_temperature': 14.6, 'relative_humidity': 62.8, 'time': '2017-04-15T00:40:12Z', 'depth': -99999.0, 'latitude': 42.0, 'longitude': -86.6}
http://localhost:30801/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 60000 -- total: 168250 -- current_count: 20000 -- duration: 35.323143
first_item: {'air_temperature': 22.8, 'relative_humidity': 56.5, 'time': '2017-04-25T15:40:12Z', 'depth': -99999.0, 'latitude': 26.0, 'longitude': -85.6}
http://localhost:30801/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 80000 -- total: 168250 -- current_count: 20000 -- duration: 40.637501
first_item: {'air_temperature': 27.8, 'relative_humidity': 62.5, 'time': '2017-04-08T20:10:12Z', 'depth': -99999.0, 'latitude': 11.3, 'longitude': -60.5}
http://localhost:30801/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 100000 -- total: 168250 -- current_count: 20000 -- duration: 47.147783
first_item: {'air_temperature': 5.3, 'relative_humidity': 70.1, 'time': '2017-04-26T11:00:00Z', 'depth': -99999.0, 'latitude': 53.3, 'longitude': 2.0}
http://localhost:30801/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 120000 -- total: 168250 -- current_count: 20000 -- duration: 53.092327
first_item: {'air_temperature': 27.2, 'relative_humidity': 63.6, 'time': '2017-04-01T16:00:00Z', 'depth': -99999.0, 'latitude': 19.8, 'longitude': -70.7}
http://localhost:30801/1.0/query_data_doms?startIndex=140000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 140000 -- total: 168250 -- current_count: 20000 -- duration: 33.10979
first_item: {'air_temperature': 9.0, 'relative_humidity': 87.3, 'time': '2017-04-01T14:00:00Z', 'depth': -99999.0, 'latitude': 57.6, 'longitude': 1.1}
http://localhost:30801/1.0/query_data_doms?startIndex=160000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 160000 -- total: 168250 -- current_count: 8250 -- duration: 27.929617
first_item: {'air_temperature': 23.8, 'relative_humidity': 67.8, 'time': '2017-04-19T02:10:12Z', 'depth': -99999.0, 'latitude': 21.6, 'longitude': -58.6}
http://localhost:30801/1.0/query_data_doms?startIndex=180000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 180000 -- total: 168250 -- current_count: 0 -- duration: 9.532945
Process finished with exit code 0
Connected to pydev debugger (build 201.7223.92)
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
/Users/wphyo/anaconda3/envs/cdms_parquet_3.6/lib/python3.6/site-packages/urllib3-1.26.7-py3.6.egg/urllib3/connectionpool.py:1020: InsecureRequestWarning: Unverified HTTPS request is being made to host 'a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html#ssl-warnings
InsecureRequestWarning,
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 0 -- total: 168250 -- current_count: 20000 -- duration: 24.769869
first_item: {'air_temperature': 7.8, 'relative_humidity': 91.5, 'time': '2017-04-01T09:00:00Z', 'depth': -99999.0, 'latitude': 61.6, 'longitude': 1.3}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 20000 -- total: 168250 -- current_count: 20000 -- duration: 10.757908
first_item: {'air_temperature': 23.2, 'relative_humidity': 64.3, 'time': '2017-04-16T19:19:48Z', 'depth': -99999.0, 'latitude': 23.8, 'longitude': -68.4}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 40000 -- total: 168250 -- current_count: 20000 -- duration: 11.468385
first_item: {'air_temperature': 24.0, 'relative_humidity': 98.2, 'time': '2017-04-05T21:49:48Z', 'depth': -99999.0, 'latitude': 28.8, 'longitude': -86.0}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 60000 -- total: 168250 -- current_count: 20000 -- duration: 12.194898
first_item: {'air_temperature': 20.4, 'relative_humidity': 82.4, 'time': '2017-04-22T03:10:12Z', 'depth': -99999.0, 'latitude': 31.9, 'longitude': -69.6}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 80000 -- total: 168250 -- current_count: 20000 -- duration: 13.594509
first_item: {'air_temperature': 4.2, 'relative_humidity': 70.4, 'time': '2017-04-22T13:00:00Z', 'depth': -99999.0, 'latitude': 60.6, 'longitude': 1.6}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 100000 -- total: 168250 -- current_count: 20000 -- duration: 16.949609
first_item: {'air_temperature': 5.2, 'relative_humidity': 58.4, 'time': '2017-04-16T03:00:00Z', 'depth': -99999.0, 'latitude': 61.1, 'longitude': 1.0}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 120000 -- total: 168250 -- current_count: 20000 -- duration: 45.506358
first_item: {'air_temperature': 26.6, 'relative_humidity': 61.9, 'time': '2017-04-19T15:19:48Z', 'depth': -99999.0, 'latitude': 19.8, 'longitude': -70.7}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=140000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 140000 -- total: 168250 -- current_count: 20000 -- duration: 57.124638
first_item: {'air_temperature': 14.8, 'relative_humidity': 65.5, 'time': '2017-04-25T01:49:48Z', 'depth': -99999.0, 'latitude': 41.6, 'longitude': -81.8}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=160000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 160000 -- total: 168250 -- current_count: 8250 -- duration: 22.821795
first_item: {'air_temperature': 27.8, 'relative_humidity': 76.5, 'time': '2017-04-15T02:40:12Z', 'depth': -99999.0, 'latitude': 11.3, 'longitude': -60.5}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=180000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-04-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-04-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 180000 -- total: 168250 -- current_count: 0 -- duration: 3.48374
Process finished with exit code 0
Connected to pydev debugger (build 201.7223.92)
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 0 -- total: 227787 -- current_count: 20000 -- duration: 11.207761
first_item: {'air_temperature': 14.1, 'relative_humidity': 61.9, 'time': '2017-06-21T12:00:00Z', 'depth': -99999.0, 'latitude': 61.6, 'longitude': 1.3}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 20000 -- total: 227787 -- current_count: 20000 -- duration: 11.799825
first_item: {'air_temperature': 26.0, 'relative_humidity': 90.9, 'time': '2017-06-16T23:19:48Z', 'depth': -99999.0, 'latitude': 31.9, 'longitude': -69.6}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 40000 -- total: 227787 -- current_count: 20000 -- duration: 14.555546
first_item: {'air_temperature': 11.3, 'relative_humidity': 97.4, 'time': '2017-06-30T01:00:00Z', 'depth': -99999.0, 'latitude': 46.8, 'longitude': -91.8}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 60000 -- total: 227787 -- current_count: 20000 -- duration: 18.231606
first_item: {'air_temperature': 28.4, 'relative_humidity': 89.5, 'time': '2017-06-24T09:49:48Z', 'depth': -99999.0, 'latitude': 27.9, 'longitude': -95.4}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 80000 -- total: 227787 -- current_count: 20000 -- duration: 15.588901
first_item: {'air_temperature': 10.5, 'relative_humidity': 85.1, 'time': '2017-06-07T03:49:48Z', 'depth': -99999.0, 'latitude': 47.3, 'longitude': -88.6}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 100000 -- total: 227787 -- current_count: 20000 -- duration: 40.102472
first_item: {'air_temperature': 27.3, 'relative_humidity': 76.9, 'time': '2017-06-04T06:49:48Z', 'depth': -99999.0, 'latitude': 14.5, 'longitude': -53.0}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 120000 -- total: 227787 -- current_count: 20000 -- duration: 54.26978
first_item: {'air_temperature': 28.1, 'relative_humidity': 82.3, 'time': '2017-06-17T01:19:48Z', 'depth': -99999.0, 'latitude': 16.4, 'longitude': -63.2}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=140000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 140000 -- total: 227787 -- current_count: 20000 -- duration: 41.277232
first_item: {'air_temperature': 16.1, 'relative_humidity': 82.9, 'time': '2017-06-07T22:49:48Z', 'depth': -99999.0, 'latitude': 36.6, 'longitude': -74.8}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=160000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 160000 -- total: 227787 -- current_count: 20000 -- duration: 51.041598
first_item: {'air_temperature': 15.4, 'relative_humidity': 76.0, 'time': '2017-06-04T14:00:00Z', 'depth': -99999.0, 'latitude': 45.2, 'longitude': -5.0}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=180000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-06-01T00:00:00Z - 2017-09-30T00:00:00Z -- start_index: 180000 -- total: 227787 -- current_count: 20000 -- duration: 43.003454
first_item: {'air_temperature': 15.4, 'relative_humidity': 96.8, 'time': '2017-06-23T17:49:48Z', 'depth': -99999.0, 'latitude': 43.5, 'longitude': -70.1}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=200000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-06-01T00:00:00Z&endTime=2017-09-30T00:00:00Z&bbox=-111,11,111,99
Traceback (most recent call last):
File "/Applications/PyCharm.app/Contents/plugins/python/helpers/pydev/pydevd.py", line 1438, in _exec
pydev_imports.execfile(file, globals, locals) # execute the script
File "/Applications/PyCharm.app/Contents/plugins/python/helpers/pydev/_pydev_imps/_pydev_execfile.py", line 18, in execfile
exec(compile(contents+"\n", file, 'exec'), glob, loc)
File "/Users/wphyo/Projects/access/parquet_test_1/tests/bench_mark/bench_mark.py", line 326, in <module>
BenchMark().pagination_bench_mark()
File "/Users/wphyo/Projects/access/parquet_test_1/tests/bench_mark/bench_mark.py", line 233, in pagination_bench_mark
response = self.__execute_query()
File "/Users/wphyo/Projects/access/parquet_test_1/tests/bench_mark/func_exec_time_decorator.py", line 12, in decorated_function
func_result = f(*args, **kwargs)
File "/Users/wphyo/Projects/access/parquet_test_1/tests/bench_mark/bench_mark.py", line 56, in __execute_query
raise ValueError(f'wrong status code: {response.status_code}. details: {response.text}')
ValueError: wrong status code: 504. details:
Connected to pydev debugger (build 201.7223.92)
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 0 -- total: 178348 -- current_count: 20000 -- duration: 7.309901
first_item: {'air_temperature': 5.8, 'relative_humidity': 59.4, 'time': '2017-10-29T09:00:00Z', 'depth': -99999.0, 'latitude': 61.6, 'longitude': 1.3}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 20000 -- total: 178348 -- current_count: 20000 -- duration: 14.485547
first_item: {'air_temperature': 14.1, 'relative_humidity': 88.9, 'time': '2017-10-08T08:00:00Z', 'depth': -99999.0, 'latitude': 46.8, 'longitude': -91.8}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 40000 -- total: 178348 -- current_count: 20000 -- duration: 21.060409
first_item: {'air_temperature': 28.2, 'relative_humidity': 85.3, 'time': '2017-10-10T18:49:48Z', 'depth': -99999.0, 'latitude': 29.2, 'longitude': -88.2}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 60000 -- total: 178348 -- current_count: 20000 -- duration: 18.033041
first_item: {'air_temperature': 13.3, 'relative_humidity': 78.8, 'time': '2017-10-14T16:00:00Z', 'depth': -99999.0, 'latitude': 57.0, 'longitude': 1.9}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 80000 -- total: 178348 -- current_count: 20000 -- duration: 35.704179
first_item: {'air_temperature': 10.7, 'relative_humidity': 86.3, 'time': '2017-10-25T19:00:00Z', 'depth': -99999.0, 'latitude': 59.7, 'longitude': 1.6}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 100000 -- total: 178348 -- current_count: 20000 -- duration: 44.254885
first_item: {'air_temperature': 11.3, 'relative_humidity': 70.8, 'time': '2017-10-19T13:00:00Z', 'depth': -99999.0, 'latitude': 61.3, 'longitude': 1.5}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 120000 -- total: 178348 -- current_count: 20000 -- duration: 46.126414
first_item: {'air_temperature': 4.3, 'relative_humidity': 71.0, 'time': '2017-10-29T05:30:00Z', 'depth': -99999.0, 'latitude': 42.1, 'longitude': -87.7}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=140000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 140000 -- total: 178348 -- current_count: 20000 -- duration: 55.652159
first_item: {'air_temperature': 15.5, 'relative_humidity': 96.8, 'time': '2017-10-15T00:00:00Z', 'depth': -99999.0, 'latitude': 54.1, 'longitude': 14.2}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=160000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 160000 -- total: 178348 -- current_count: 18348 -- duration: 59.856939
first_item: {'air_temperature': 27.9, 'relative_humidity': 83.8, 'time': '2017-10-26T08:10:12Z', 'depth': -99999.0, 'latitude': 16.9, 'longitude': -81.4}
https://a106a87ec5ba747c5915cc0ec23c149f-881305611.us-west-2.elb.amazonaws.com/insitu/1.0/query_data_doms?startIndex=180000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=41&variable=relative_humidity&columns=air_temperature&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 180000 -- total: 178348 -- current_count: 0 -- duration: 3.206068
Process finished with exit code 0
Connected to pydev debugger (build 201.7223.92)
http://localhost:30801/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 0 -- total: 100717 -- current_count: 20000 -- duration: 76.297046
first_item: {'depth': -99999.0, 'latitude': 31.8, 'longitude': -80.5, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O3HNHB', 'platform': {'type': '5', 'code': '30', 'id': 'WTEA'}, 'time': '2017-10-22T02:58:48Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 70.3, 'relative_humidity_quality': 1, 'air_temperature': 25.4, 'air_temperature_quality': 1, 'eastward_wind': 0.9, 'northward_wind': -5.0, 'wind_component_quality': 1, 'wind_from_direction': 100.0, 'wind_from_direction_quality': 1, 'wind_speed': 5.1, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '377beee2-1eef-4be5-804e-f84440f008a7'}
http://localhost:30801/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 20000 -- total: 100717 -- current_count: 20000 -- duration: 34.533613
first_item: {'depth': -99999.0, 'latitude': 44.8, 'longitude': -75.4, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O1UI93', 'platform': {'type': '5', 'code': '30', 'id': 'VAAP'}, 'time': '2017-10-08T12:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 83.6, 'relative_humidity_quality': 1, 'air_temperature': 22.2, 'air_temperature_quality': 1, 'eastward_wind': 13.3, 'northward_wind': 7.7, 'wind_component_quality': 1, 'wind_from_direction': 210.0, 'wind_from_direction_quality': 1, 'wind_speed': 15.4, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': 'f99c043e-9307-4ba6-b4e0-5bad53d476ba'}
http://localhost:30801/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 40000 -- total: 100717 -- current_count: 20000 -- duration: 37.628303
first_item: {'depth': -99999.0, 'latitude': 13.9, 'longitude': 51.7, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O1B8EG', 'platform': {'type': '5', 'code': '30', 'id': 'DFGN2'}, 'time': '2017-10-04T03:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 66.8, 'relative_humidity_quality': 1, 'air_temperature': 27.1, 'air_temperature_quality': 1, 'eastward_wind': 3.5, 'northward_wind': -3.0, 'wind_component_quality': 1, 'wind_from_direction': 140.0, 'wind_from_direction_quality': 1, 'wind_speed': 4.6, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '84501f83-2600-413d-888e-a2d95ebc36e4'}
http://localhost:30801/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 60000 -- total: 100717 -- current_count: 20000 -- duration: 36.250365
first_item: {'depth': -99999.0, 'latitude': 49.6, 'longitude': -46.8, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O1JI7U', 'platform': {'type': '5', 'code': '30', 'id': 'VOFG'}, 'time': '2017-10-06T00:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 99.3, 'relative_humidity_quality': 1, 'air_temperature': 11.5, 'air_temperature_quality': 1, 'eastward_wind': 7.3, 'northward_wind': 8.7, 'wind_component_quality': 1, 'wind_from_direction': 230.0, 'wind_from_direction_quality': 1, 'wind_speed': 11.3, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '10c7a1f0-4681-4b58-9af4-b836613bd782'}
http://localhost:30801/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 80000 -- total: 100717 -- current_count: 20000 -- duration: 39.197329
first_item: {'depth': -99999.0, 'latitude': 43.8, 'longitude': -60.6, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O2YF5E', 'platform': {'type': '5', 'code': '30', 'id': 'CFL24'}, 'time': '2017-10-17T16:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 75.3, 'relative_humidity_quality': 1, 'air_temperature': 10.8, 'air_temperature_quality': 1, 'eastward_wind': -11.3, 'northward_wind': 0.0, 'wind_component_quality': 1, 'wind_from_direction': 360.0, 'wind_from_direction_quality': 1, 'wind_speed': 11.3, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': 'a2f3408c-aaa1-476d-9a2d-e7af2d9d6f3e'}
http://localhost:30801/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 100000 -- total: 100717 -- current_count: 717 -- duration: 22.584522
first_item: {'depth': -99999.0, 'latitude': 61.7, 'longitude': -49.6, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O355II', 'platform': {'type': '5', 'code': '30', 'id': 'BATEU05'}, 'time': '2017-10-19T05:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 76.8, 'relative_humidity_quality': 1, 'air_temperature': 0.4, 'air_temperature_quality': 1, 'eastward_wind': -8.2, 'northward_wind': 3.0, 'wind_component_quality': 1, 'wind_from_direction': 340.0, 'wind_from_direction_quality': 1, 'wind_speed': 8.7, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': 'd537d2b6-04bb-4496-836f-2053334ee5f8'}
http://localhost:30801/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 120000 -- total: 100717 -- current_count: 0 -- duration: 14.673852
Connected to pydev debugger (build 201.7223.92)
http://localhost:30801/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 0 -- total: 100717 -- current_count: 20000 -- duration: 21.513823
first_item: {'depth': -99999.0, 'latitude': 71.3, 'longitude': 22.3, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O4FT8M', 'platform': {'type': '5', 'code': '30', 'id': 'LF8G'}, 'time': '2017-10-30T00:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 63.9, 'relative_humidity_quality': 1, 'air_temperature': 1.6, 'air_temperature_quality': 1, 'eastward_wind': -3.7, 'northward_wind': 4.4, 'wind_component_quality': 1, 'wind_from_direction': 310.0, 'wind_from_direction_quality': 1, 'wind_speed': 5.7, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '601ca9e6-0bdc-43ae-8872-be9edda4ae36'}
http://localhost:30801/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 20000 -- total: 100717 -- current_count: 20000 -- duration: 23.372421
first_item: {'depth': -99999.0, 'latitude': 45.3, 'longitude': -80.0, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O1XLK2', 'platform': {'type': '5', 'code': '30', 'id': 'CG2960'}, 'time': '2017-10-09T05:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 93.7, 'relative_humidity_quality': 1, 'air_temperature': 14.4, 'air_temperature_quality': 1, 'eastward_wind': -0.4, 'northward_wind': -0.3, 'wind_component_quality': 1, 'wind_from_direction': 40.0, 'wind_from_direction_quality': 1, 'wind_speed': 0.5, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': 'cfcc6ccf-db9e-49a9-8f13-6aad127390b0'}
http://localhost:30801/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 40000 -- total: 100717 -- current_count: 20000 -- duration: 30.2876
first_item: {'depth': -99999.0, 'latitude': 15.8, 'longitude': 84.5, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O1B9J0', 'platform': {'type': '5', 'code': '30', 'id': 'AUYB'}, 'time': '2017-10-04T03:10:12Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 86.0, 'relative_humidity_quality': 1, 'air_temperature': 28.9, 'air_temperature_quality': 1, 'eastward_wind': 2.5, 'northward_wind': 4.3, 'wind_component_quality': 1, 'wind_from_direction': 240.0, 'wind_from_direction_quality': 1, 'wind_speed': 5.0, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '84501f83-2600-413d-888e-a2d95ebc36e4'}
http://localhost:30801/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 60000 -- total: 100717 -- current_count: 20000 -- duration: 26.036587
first_item: {'depth': -99999.0, 'latitude': 18.9, 'longitude': 72.8, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O45N8E', 'platform': {'type': '5', 'code': '30', 'id': '8TAO'}, 'time': '2017-10-27T15:19:12Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 70.0, 'relative_humidity_quality': 1, 'air_temperature': 30.0, 'air_temperature_quality': 1, 'eastward_wind': 0.2, 'northward_wind': 1.0, 'wind_component_quality': 1, 'wind_from_direction': 260.0, 'wind_from_direction_quality': 1, 'wind_speed': 1.0, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': 'e07d4d13-6807-4d1e-82f2-875495dc69d4'}
http://localhost:30801/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 80000 -- total: 100717 -- current_count: 20000 -- duration: 41.069749
first_item: {'depth': -99999.0, 'latitude': 60.1, 'longitude': -61.3, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O3OD79', 'platform': {'type': '5', 'code': '30', 'id': 'VAAZ'}, 'time': '2017-10-23T16:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 80.9, 'relative_humidity_quality': 1, 'air_temperature': 0.8, 'air_temperature_quality': 1, 'eastward_wind': 7.5, 'northward_wind': -4.4, 'wind_component_quality': 1, 'wind_from_direction': 150.0, 'wind_from_direction_quality': 1, 'wind_speed': 8.7, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '3f4af645-1a43-4c40-8c42-7e2249a7626a'}
http://localhost:30801/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 100000 -- total: 100717 -- current_count: 717 -- duration: 21.975566
first_item: {'depth': -99999.0, 'latitude': 50.0, 'longitude': -2.2, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=O109GD', 'platform': {'type': '5', 'code': '30', 'id': 'AMOUK50'}, 'time': '2017-10-01T15:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 98.1, 'relative_humidity_quality': 1, 'air_temperature': 16.2, 'air_temperature_quality': 1, 'eastward_wind': None, 'northward_wind': None, 'wind_component_quality': None, 'wind_from_direction': None, 'wind_from_direction_quality': None, 'wind_speed': None, 'wind_speed_quality': None, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '682f795e-cee5-4796-a4c3-ebb7d09cdc41'}
http://localhost:30801/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-10-01T00:00:00Z&endTime=2017-10-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-10-01T00:00:00Z - 2017-10-30T00:00:00Z -- start_index: 120000 -- total: 100717 -- current_count: 0 -- duration: 14.142189
Connected to pydev debugger (build 201.7223.92)
http://localhost:30801/1.0/query_data_doms?startIndex=0&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 0 -- total: 105140 -- current_count: 20000 -- duration: 32.397474
first_item: {'depth': -99999.0, 'latitude': 70.8, 'longitude': 30.8, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=NAVHX0', 'platform': {'type': '5', 'code': '30', 'id': 'LAHV'}, 'time': '2017-03-09T21:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 71.2, 'relative_humidity_quality': 1, 'air_temperature': 0.0, 'air_temperature_quality': 1, 'eastward_wind': 0.9, 'northward_wind': 4.9, 'wind_component_quality': 1, 'wind_from_direction': 260.0, 'wind_from_direction_quality': 1, 'wind_speed': 5.0, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '2e9bbe86-beef-4584-8c08-215bd5f380f0'}
http://localhost:30801/1.0/query_data_doms?startIndex=20000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 20000 -- total: 105140 -- current_count: 20000 -- duration: 36.676376
first_item: {'depth': -99999.0, 'latitude': 43.0, 'longitude': 6.8, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=NA9PQ6', 'platform': {'type': '5', 'code': '30', 'id': 'BATFR66'}, 'time': '2017-03-05T02:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 62.8, 'relative_humidity_quality': 1, 'air_temperature': 10.7, 'air_temperature_quality': 1, 'eastward_wind': -2.0, 'northward_wind': 11.1, 'wind_component_quality': 1, 'wind_from_direction': 280.0, 'wind_from_direction_quality': 1, 'wind_speed': 11.3, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '153ead85-10b6-4dd1-98f5-dc932e4aab09'}
http://localhost:30801/1.0/query_data_doms?startIndex=40000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 40000 -- total: 105140 -- current_count: 20000 -- duration: 43.398249
first_item: {'depth': -99999.0, 'latitude': 50.1, 'longitude': -53.8, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=NCWSR4', 'platform': {'type': '5', 'code': '30', 'id': 'CGCX'}, 'time': '2017-03-26T02:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 72.6, 'relative_humidity_quality': 1, 'air_temperature': -6.7, 'air_temperature_quality': 1, 'eastward_wind': -4.6, 'northward_wind': 5.5, 'wind_component_quality': 1, 'wind_from_direction': 310.0, 'wind_from_direction_quality': 1, 'wind_speed': 7.2, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '0c2169ee-d829-4855-8f7e-730f53722bfe'}
http://localhost:30801/1.0/query_data_doms?startIndex=60000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 60000 -- total: 105140 -- current_count: 20000 -- duration: 37.260118
first_item: {'depth': -99999.0, 'latitude': 45.9, 'longitude': -73.2, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=NA2GFN', 'platform': {'type': '5', 'code': '30', 'id': 'VCBW'}, 'time': '2017-03-03T12:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 60.9, 'relative_humidity_quality': 1, 'air_temperature': -14.1, 'air_temperature_quality': 4, 'eastward_wind': -0.4, 'northward_wind': 0.3, 'wind_component_quality': 1, 'wind_from_direction': 320.0, 'wind_from_direction_quality': 1, 'wind_speed': 0.5, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '2d609a98-ca92-4b88-8d8b-c3d080765f4d'}
http://localhost:30801/1.0/query_data_doms?startIndex=80000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 80000 -- total: 105140 -- current_count: 20000 -- duration: 66.527481
first_item: {'depth': -99999.0, 'latitude': 24.0, 'longitude': -74.9, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=NCXDXR', 'platform': {'type': '5', 'code': '30', 'id': 'J8NW'}, 'time': '2017-03-26T05:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 91.3, 'relative_humidity_quality': 1, 'air_temperature': 24.0, 'air_temperature_quality': 1, 'eastward_wind': 0.0, 'northward_wind': -9.3, 'wind_component_quality': 1, 'wind_from_direction': 90.0, 'wind_from_direction_quality': 1, 'wind_speed': 9.3, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': '0c2169ee-d829-4855-8f7e-730f53722bfe'}
http://localhost:30801/1.0/query_data_doms?startIndex=100000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 100000 -- total: 105140 -- current_count: 5140 -- duration: 39.512695
first_item: {'depth': -99999.0, 'latitude': 69.6, 'longitude': 18.9, 'meta': 'https://rda.ucar.edu/php/icoadsuid.php?uid=NCMRFN', 'platform': {'type': '5', 'code': '30', 'id': 'LGWS'}, 'time': '2017-03-23T21:00:00Z', 'provider': 'NCAR', 'project': 'ICOADS Release 3.0', 'platform_code': '30', 'relative_humidity': 89.5, 'relative_humidity_quality': 1, 'air_temperature': -1.0, 'air_temperature_quality': 1, 'eastward_wind': 1.5, 'northward_wind': 1.3, 'wind_component_quality': 1, 'wind_from_direction': 220.0, 'wind_from_direction_quality': 1, 'wind_speed': 2.0, 'wind_speed_quality': 1, 'air_pressure': None, 'air_pressure_quality': None, 'job_id': 'c2fcc1a5-55ad-4921-a961-e20c9646327f'}
http://localhost:30801/1.0/query_data_doms?startIndex=120000&itemsPerPage=20000&provider=NCAR&project=ICOADS Release 3.0&platform=30,41,42&variable=relative_humidity&minDepth=-99&maxDepth=0&startTime=2017-03-01T00:00:00Z&endTime=2017-04-30T00:00:00Z&bbox=-111,11,111,99
time: 2017-03-01T00:00:00Z - 2017-04-30T00:00:00Z -- start_index: 120000 -- total: 105140 -- current_count: 0 -- duration: 23.947456
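Note: as the runs above show, once start_index reaches or passes the reported
total, the endpoint returns current_count 0, so paging past the last full
page costs one extra (empty) request.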
:return:
"""
self.__start_time = '2017-05-01T00:00:00Z'
self.__end_time = '2017-07-30T00:00:00Z'
self.__platform_code = '41'
self.__variable = None
self.__columns = None
self.__start_index = 0
self.__size = 20000
response = self.__execute_query()
print(f'time: {self.__start_time} - {self.__end_time} -- start_index: {self.__start_index} -- total: {response[0]["total"]} -- current_count: {len(response[0]["results"])} -- duration: {response[1]}')
if len(response[0]["results"]) > 0:
print(f'first_item: {response[0]["results"][0]}')
total = response[0]['total']
while self.__start_index < total:
self.__start_index += self.__size
response = self.__execute_query()
print(f'time: {self.__start_time} - {self.__end_time} -- start_index: {self.__start_index} -- total: {response[0]["total"]} -- current_count: {len(response[0]["results"])} -- duration: {response[1]}')
if len(response[0]["results"]) > 0:
print(f'first_item: {response[0]["results"][0]}')
return
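# Illustrative only: a standalone sketch of the offset-paging pattern used
# above, assuming a JSON payload with 'total' and 'results' keys as in the
# captured logs. base_url and params are placeholders; this is not the
# project's __execute_query implementation, and verify=False merely mirrors
# the InsecureRequestWarning seen in the output above.
def offset_paginate_sketch(base_url, params, page_size=20000):
    import time

    import requests

    start_index = 0
    total = None
    while total is None or start_index < total:
        params.update({'startIndex': start_index, 'itemsPerPage': page_size})
        begin = time.perf_counter()
        payload = requests.get(base_url, params=params, verify=False).json()
        duration = time.perf_counter() - begin
        total = payload['total']
        print(f'start_index: {start_index} -- total: {total} '
              f'-- current_count: {len(payload["results"])} -- duration: {duration:.6f}')
        start_index += page_size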
def custom_pagination_bench_mark(self):
self.__start_time = '2018-08-30T00:00:00Z'
self.__end_time = '2018-08-31T00:00:00Z'
# self.__platform_code = '42,41,30,16,17'
self.__platform_code = '42'
self.__min_lat_lon = (-25.2, 168.8)
self.__max_lat_lon = (-25.1, 169.0)
# self.__min_depth = -99
# self.__max_depth = 0
# self.__min_lat_lon = (-111, 11)
# self.__max_lat_lon = (111, 99)
# self.__provider = 'Florida State University, COAPS'
# self.__project = 'SAMOS'
# self.__platform_code = '30'
# self.__provider = 'Saildrone'
# self.__project = '1021_atlantic'
# self.__platform_code = '3B'
# self.__start_time = '2019-10-01T00:00:00Z'
# self.__end_time = '2019-10-16T00:00:00Z'
#
self.__variable = None
self.__columns = None
self.__start_index = 0
self.__size = 20000
response = self.__execute_query_custom_pagination()
print(f'time: {self.__start_time} - {self.__end_time} -- start_index: {self.__start_index} -- total: {response[0]["total"]} -- current_count: {len(response[0]["results"])} -- duration: {response[1]}')
while response[0]['next'] != 'NA':
if len(response[0]['results']) < 1:
print('empty result set. breaking')
break
print(f'first_item: {response[0]["results"][0]}')
print(f'last_item: {response[0]["results"][-1]}')
response = self.__execute_blind_query(response[0]['next'])
print(f'time: {self.__start_time} - {self.__end_time} -- start_index: {self.__start_index} -- total: {response[0]["total"]} -- current_count: {len(response[0]["results"])} -- duration: {response[1]}')
return
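# Illustrative only: the cursor-style loop above, reduced to its skeleton.
# It assumes each payload carries an opaque 'next' URL that is the string
# 'NA' on the last page, as custom_pagination_bench_mark expects; fetch_json
# is a hypothetical callable standing in for __execute_blind_query.
def cursor_paginate_sketch(first_url, fetch_json):
    payload = fetch_json(first_url)
    while payload['next'] != 'NA':
        if not payload['results']:
            print('empty result set. breaking')
            break
        print(f'first_item: {payload["results"][0]}')
        print(f'last_item: {payload["results"][-1]}')
        payload = fetch_json(payload['next'])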
def time_bench_mark(self):
"""
time: 2017-01-01T00:00:00Z - 2017-01-02T00:00:00Z -- total: 8316 -- duration: 105.139927
time: 2017-12-01T00:00:00Z - 2017-12-16T00:00:00Z -- total: 59753 -- duration: 72.037163
time: 2017-02-01T00:00:00Z - 2017-02-28T00:00:00Z -- total: 104602 -- duration: 67.783443
time: 2017-04-01T00:00:00Z - 2017-05-30T00:00:00Z -- total: 380510 -- duration: 112.183817
time: 2017-06-01T00:00:00Z - 2017-08-30T00:00:00Z -- total: 661753 -- duration: 145.768916
time: 2017-01-01T00:00:00Z - 2017-06-30T00:00:00Z -- total: 979690 -- duration: 251.343631
:return:
"""
self.__min_depth = -99
self.__max_depth = 0
self.__min_lat_lon = (-111, 11)
self.__max_lat_lon = (111, 99)
self.__provider = 'Florida State University, COAPS'
self.__project = 'SAMOS'
self.__platform_code = '30'
self.__start_index = 10
# self.__start_time = '2017-01-01T00:00:00Z'
# self.__end_time = '2017-01-02T00:00:00Z'
# response = self.__execute_query()
# print(f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- duration: {response[1]}')
self.__start_time = '2017-12-01T00:00:00Z'
self.__end_time = '2017-12-16T00:00:00Z'
response = self.__execute_query()
print(
f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- current_count: {len(response[0]["results"])} -- duration: {response[1]} -- first_item: {response[0]["results"][0]}')
# raise ValueError('not yet')
self.__start_time = '2017-02-01T00:00:00Z'
self.__end_time = '2017-02-28T00:00:00Z'
response = self.__execute_query()
print(
f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- duration: {response[1]}')
self.__start_time = '2017-04-01T00:00:00Z'
self.__end_time = '2017-05-30T00:00:00Z'
response = self.__execute_query()
print(
f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- duration: {response[1]}')
self.__start_time = '2017-06-01T00:00:00Z'
self.__end_time = '2017-08-30T00:00:00Z'
response = self.__execute_query()
print(
f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- duration: {response[1]}')
self.__start_time = '2017-01-01T00:00:00Z'
self.__end_time = '2017-06-30T00:00:00Z'
response = self.__execute_query()
print(
f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- duration: {response[1]}')
self.__start_time = '2017-01-01T00:00:00Z'
self.__end_time = '2017-12-30T00:00:00Z'
response = self.__execute_query()
print(
f'time: {self.__start_time} - {self.__end_time} -- total: {response[0]["total"]} -- duration: {response[1]}')
return
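# Illustrative only: one way the (payload, duration) pairs consumed above
# could be produced. __execute_query's body is not shown here, so this is a
# guess at its timing contract, not its implementation.
def timed_query_sketch(do_request):
    import time

    begin = time.perf_counter()
    payload = do_request()
    return payload, time.perf_counter() - begin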
def depth_bench_mark(self):
return
def bbox_bench_mark(self):
return
def samos_test(self):
"""
provider=Florida State University, COAPS/
project=SAMOS/
platform_code=30/
:return:
"""
self.__variable = 'relative_humidity'
self.__columns = None
self.__start_time = '2017-01-01T00:00:00Z'
self.__end_time = '2017-01-03T00:00:00Z'
self.__min_depth = -99
self.__max_depth = 0
self.__min_lat_lon = (-111, 11)
self.__max_lat_lon = (111, 99)
self.__provider = 'Florida State University, COAPS'
self.__project = 'SAMOS'
self.__platform_code = '301'
print(self.__execute_query())
return
if __name__ == '__main__':
BenchMark().custom_pagination_bench_mark()
[dataset row boundary; numeric quality-signal columns omitted. Next file: tests/models/test_prefix_graph.py from areading314/conda (blob 6d957e525b37b3320a686c122a14ce22d5fcef6c, repo head c9aa50360af308048f57bc2d1c9ae5707e057e3b), Python, 40,172 bytes, BSD-3-Clause.]
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from conda._vendor.auxlib.decorators import memoize
from conda.base.context import reset_context
from conda.common.io import env_var
from conda.exceptions import CyclicalDependencyError
from conda.models.match_spec import MatchSpec
import conda.models.prefix_graph
from conda.models.prefix_graph import PrefixGraph
import pytest
from tests.core.test_solve import get_solver_4, get_solver_5
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
@memoize
def get_conda_build_record_set():
specs = MatchSpec("conda"), MatchSpec("conda-build"), MatchSpec("intel-openmp"),
with get_solver_4(specs) as solver:
final_state = solver.solve_final_state()
return final_state, frozenset(specs)
@memoize
def get_pandas_record_set():
specs = MatchSpec("pandas"), MatchSpec("python=2.7"), MatchSpec("numpy 1.13")
with get_solver_4(specs) as solver:
final_state = solver.solve_final_state()
return final_state, frozenset(specs)
@memoize
def get_windows_conda_build_record_set():
specs = (MatchSpec("conda"), MatchSpec("conda-build"), MatchSpec("affine"),
MatchSpec("colour"), MatchSpec("uses-spiffy-test-app"),)
with get_solver_5(specs) as solver:
final_state = solver.solve_final_state()
return final_state, frozenset(specs)
@memoize
def get_sqlite_cyclical_record_set():
# sqlite-3.20.1-haaaaaaa_4
specs = MatchSpec("sqlite=3.20.1[build_number=4]"), MatchSpec("flask"),
with get_solver_4(specs) as solver:
final_state = solver.solve_final_state()
return final_state, frozenset(specs)
def test_prefix_graph_1():
# Basic initial test for public methods of PrefixGraph.
records, specs = get_conda_build_record_set()
graph = PrefixGraph(records, specs)
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-4::intel-openmp-2018.0.0-hc7b2577_8',
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
'channel-4::conda-4.4.10-py36_0',
'channel-4::conda-build-3.5.1-py36_0',
)
assert nodes == order
python_node = graph.get_node_by_name('python')
python_ancestors = graph.all_ancestors(python_node)
nodes = tuple(rec.dist_str() for rec in python_ancestors)
print(nodes)
order = (
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
)
assert nodes == order
python_descendants = graph.all_descendants(python_node)
nodes = tuple(rec.dist_str() for rec in python_descendants)
print(nodes)
order = (
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
'channel-4::conda-4.4.10-py36_0',
'channel-4::conda-build-3.5.1-py36_0',
)
assert nodes == order
# test remove_specs
removed_nodes = graph.remove_spec(MatchSpec("requests"))
nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(nodes)
order = (
'channel-4::requests-2.18.4-py36he2e5f8d_1',
'channel-4::conda-4.4.10-py36_0',
'channel-4::conda-build-3.5.1-py36_0',
)
assert nodes == order
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::intel-openmp-2018.0.0-hc7b2577_8',
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
)
assert nodes == order
spec_matches = {
'channel-4::intel-openmp-2018.0.0-hc7b2577_8': {'intel-openmp'},
}
assert {node.dist_str(): set(str(ms) for ms in specs) for node, specs in graph.spec_matches.items()} == spec_matches
removed_nodes = graph.prune()
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-4::intel-openmp-2018.0.0-hc7b2577_8',
)
assert nodes == order
order = (
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
)
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == order
def test_prefix_graph_2():
records, specs = get_conda_build_record_set()
graph = PrefixGraph(records, specs)
conda_build_node = graph.get_node_by_name('conda-build')
del graph.spec_matches[conda_build_node]
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-4::intel-openmp-2018.0.0-hc7b2577_8',
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
'channel-4::conda-4.4.10-py36_0',
'channel-4::conda-build-3.5.1-py36_0',
)
assert nodes == order
removed_nodes = graph.prune()
remaining_nodes = tuple(rec.dist_str() for rec in graph.records)
print(remaining_nodes)
order = (
'channel-4::intel-openmp-2018.0.0-hc7b2577_8',
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
'channel-4::conda-4.4.10-py36_0',
)
assert remaining_nodes == order
order = (
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::conda-build-3.5.1-py36_0',
)
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == order
def test_remove_youngest_descendant_nodes_with_specs():
records, specs = get_conda_build_record_set()
graph = PrefixGraph(records, tuple(specs) + (MatchSpec("requests"),))
removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()
remaining_nodes = tuple(rec.dist_str() for rec in graph.records)
print(remaining_nodes)
order = (
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
'channel-4::conda-4.4.10-py36_0',
)
assert remaining_nodes == order
order = (
'channel-4::intel-openmp-2018.0.0-hc7b2577_8',
'channel-4::conda-build-3.5.1-py36_0',
)
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == order
# again
removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()
remaining_nodes = tuple(rec.dist_str() for rec in graph.records)
print(remaining_nodes)
order = (
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
)
assert remaining_nodes == order
order = (
'channel-4::conda-4.4.10-py36_0',
)
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == order
# now test prune
removed_nodes = graph.prune()
remaining_nodes = tuple(rec.dist_str() for rec in graph.records)
print(remaining_nodes)
order = (
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::sqlite-3.22.0-h1bed415_0',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::asn1crypto-0.24.0-py36_0',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::chardet-3.0.4-py36h0f667ec_1',
'channel-4::idna-2.6-py36h82fb2a8_1',
'channel-4::pycparser-2.18-py36hf9f622e_1',
'channel-4::pysocks-1.6.7-py36hd97a5b1_1',
'channel-4::six-1.11.0-py36h372c433_1',
'channel-4::cffi-1.11.4-py36h9745a5d_0',
'channel-4::cryptography-2.1.4-py36hd09be54_0',
'channel-4::pyopenssl-17.5.0-py36h20ba746_0',
'channel-4::urllib3-1.22-py36hbe7ace6_0',
'channel-4::requests-2.18.4-py36he2e5f8d_1',
)
assert remaining_nodes == order
order = (
'channel-4::conda-env-2.6.0-h36134e3_1',
'channel-4::patchelf-0.9-hf79760b_2',
'channel-4::yaml-0.1.7-had09818_2',
'channel-4::beautifulsoup4-4.6.0-py36h49b8c8c_1',
'channel-4::filelock-3.0.4-py36_0',
'channel-4::glob2-0.6-py36he249c77_0',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::pkginfo-1.4.1-py36h215d178_1',
'channel-4::psutil-5.4.3-py36h14c3975_0',
'channel-4::pycosat-0.6.3-py36h0a5515d_0',
'channel-4::pyyaml-3.12-py36hafb9ca4_1',
'channel-4::ruamel_yaml-0.15.35-py36h14c3975_1',
'channel-4::conda-verify-2.0.0-py36h98955d8_0',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::jinja2-2.10-py36ha16c418_0',
)
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == order
def test_windows_sort_orders_1():
# This test makes sure the windows-specific parts of _toposort_prepare_graph
# are behaving correctly.
old_on_win = conda.models.prefix_graph.on_win
conda.models.prefix_graph.on_win = True
try:
records, specs = get_windows_conda_build_record_set()
graph = PrefixGraph(records, specs)
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-5::ca-certificates-2017.08.26-h94faf87_0',
'channel-5::conda-env-2.6.0-h36134e3_1',
'channel-5::vs2015_runtime-14.0.25123-3',
'channel-5::vc-14-h0510ff6_3',
'channel-5::openssl-1.0.2n-h74b6da3_0',
'channel-5::python-3.6.4-h6538335_1',
'channel-5::yaml-0.1.7-hc54c509_2',
'channel-5::pywin32-222-py36hfa6e2cd_0',
'channel-5::menuinst-1.4.11-py36hfa6e2cd_0', # on_win, menuinst should be very early
'channel-5::asn1crypto-0.24.0-py36_0',
'channel-5::beautifulsoup4-4.6.0-py36hd4cc5e8_1',
'channel-5::certifi-2018.1.18-py36_0',
'channel-5::chardet-3.0.4-py36h420ce6e_1',
'channel-5::filelock-3.0.4-py36_0',
'channel-5::glob2-0.6-py36hdf76b57_0',
'channel-5::idna-2.6-py36h148d497_1',
'channel-5::markupsafe-1.0-py36h0e26971_1',
'channel-5::pkginfo-1.4.1-py36hb0f9cfa_1',
'channel-5::psutil-5.4.3-py36hfa6e2cd_0',
'channel-5::pycosat-0.6.3-py36h413d8a4_0',
'channel-5::pycparser-2.18-py36hd053e01_1',
'channel-5::pyyaml-3.12-py36h1d1928f_1',
'channel-5::ruamel_yaml-0.15.35-py36hfa6e2cd_1',
'channel-5::six-1.11.0-py36h4db2310_1',
'channel-5::win_inet_pton-1.0.1-py36he67d7fd_1',
'channel-5::wincertstore-0.2-py36h7fe50ca_0',
'channel-5::cffi-1.11.4-py36hfa6e2cd_0',
'channel-5::conda-verify-2.0.0-py36h065de53_0',
'channel-5::pysocks-1.6.8-py36_0',
'channel-5::setuptools-38.5.1-py36_0',
'channel-5::cryptography-2.1.4-py36he1d7878_0',
'channel-5::jinja2-2.10-py36h292fed1_0',
'channel-5::wheel-0.30.0-py36h6c3ec14_1',
'channel-5::pip-9.0.1-py36h226ae91_4', # pip always comes after python
'channel-5::pyopenssl-17.5.0-py36h5b7d817_0',
'channel-5::urllib3-1.22-py36h276f60a_0',
'channel-5::requests-2.18.4-py36h4371aae_1',
'channel-5::conda-4.4.11-py36_0', # on_win, conda comes before all noarch: python packages (affine, colour, spiffy-test-app, uses-spiffy-test-app)
'channel-5::affine-2.1.0-pyh128a3a6_1',
'channel-5::colour-0.1.4-pyhd67b51d_0',
'channel-5::conda-build-3.5.1-py36_0',
'channel-5::spiffy-test-app-0.5-pyh6afbcc8_0',
'channel-5::uses-spiffy-test-app-2.0-pyh18698f2_0',
)
assert nodes == order
finally:
conda.models.prefix_graph.on_win = old_on_win
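# Illustrative only: the sort-order tests here reason about topological
# ordering. This is a generic Kahn's-algorithm sketch over an adjacency
# mapping in which every node appears as a key (roughly the shape of
# graph.graph); it is not conda's _toposort_prepare_graph, which also
# applies the platform-specific special cases asserted above.
def kahn_toposort_sketch(adjacency):
    from collections import deque

    indegree = {node: 0 for node in adjacency}
    for neighbors in adjacency.values():
        for node in neighbors:
            indegree[node] += 1
    queue = deque(node for node, degree in indegree.items() if degree == 0)
    order = []
    while queue:
        node = queue.popleft()
        order.append(node)
        for neighbor in adjacency[node]:
            indegree[neighbor] -= 1
            if indegree[neighbor] == 0:
                queue.append(neighbor)
    if len(order) != len(adjacency):
        raise ValueError('cycle detected')  # conda raises CyclicalDependencyError
    return order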
def test_windows_sort_orders_2():
# This test makes sure the windows-specific parts of _toposort_prepare_graph
# are behaving correctly.
with env_var('CONDA_ALLOW_CYCLES', 'false', reset_context):
old_on_win = conda.models.prefix_graph.on_win
conda.models.prefix_graph.on_win = False
try:
records, specs = get_windows_conda_build_record_set()
graph = PrefixGraph(records, specs)
python_node = graph.get_node_by_name('python')
pip_node = graph.get_node_by_name('pip')
assert pip_node in graph.graph[python_node]
assert python_node in graph.graph[pip_node]
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-5::ca-certificates-2017.08.26-h94faf87_0',
'channel-5::conda-env-2.6.0-h36134e3_1',
'channel-5::vs2015_runtime-14.0.25123-3',
'channel-5::vc-14-h0510ff6_3',
'channel-5::openssl-1.0.2n-h74b6da3_0',
'channel-5::python-3.6.4-h6538335_1',
'channel-5::yaml-0.1.7-hc54c509_2',
'channel-5::affine-2.1.0-pyh128a3a6_1',
'channel-5::asn1crypto-0.24.0-py36_0',
'channel-5::beautifulsoup4-4.6.0-py36hd4cc5e8_1',
'channel-5::certifi-2018.1.18-py36_0',
'channel-5::chardet-3.0.4-py36h420ce6e_1',
'channel-5::colour-0.1.4-pyhd67b51d_0',
'channel-5::filelock-3.0.4-py36_0',
'channel-5::glob2-0.6-py36hdf76b57_0',
'channel-5::idna-2.6-py36h148d497_1',
'channel-5::markupsafe-1.0-py36h0e26971_1',
'channel-5::pkginfo-1.4.1-py36hb0f9cfa_1',
'channel-5::psutil-5.4.3-py36hfa6e2cd_0',
'channel-5::pycosat-0.6.3-py36h413d8a4_0',
'channel-5::pycparser-2.18-py36hd053e01_1',
'channel-5::pywin32-222-py36hfa6e2cd_0',
'channel-5::pyyaml-3.12-py36h1d1928f_1',
'channel-5::ruamel_yaml-0.15.35-py36hfa6e2cd_1',
'channel-5::six-1.11.0-py36h4db2310_1',
'channel-5::spiffy-test-app-0.5-pyh6afbcc8_0',
'channel-5::win_inet_pton-1.0.1-py36he67d7fd_1',
'channel-5::wincertstore-0.2-py36h7fe50ca_0',
'channel-5::cffi-1.11.4-py36hfa6e2cd_0',
'channel-5::conda-verify-2.0.0-py36h065de53_0',
'channel-5::menuinst-1.4.11-py36hfa6e2cd_0', # not on_win, menuinst isn't changed
'channel-5::pysocks-1.6.8-py36_0',
'channel-5::setuptools-38.5.1-py36_0',
'channel-5::uses-spiffy-test-app-2.0-pyh18698f2_0',
'channel-5::cryptography-2.1.4-py36he1d7878_0',
'channel-5::jinja2-2.10-py36h292fed1_0',
'channel-5::wheel-0.30.0-py36h6c3ec14_1',
'channel-5::pip-9.0.1-py36h226ae91_4', # pip always comes after python
'channel-5::pyopenssl-17.5.0-py36h5b7d817_0',
'channel-5::urllib3-1.22-py36h276f60a_0',
'channel-5::requests-2.18.4-py36h4371aae_1',
'channel-5::conda-4.4.11-py36_0', # not on_win, no special treatment for noarch: python packages (affine, colour, spiffy-test-app, uses-spiffy-test-app)
'channel-5::conda-build-3.5.1-py36_0',
)
assert nodes == order
finally:
conda.models.prefix_graph.on_win = old_on_win
def test_sort_without_prep():
# Test the _toposort_prepare_graph method, here by not running it at all.
# The method is invoked in every other test. This is what happens when it's not invoked.
with patch.object(conda.models.prefix_graph.PrefixGraph, '_toposort_prepare_graph', return_value=None):
records, specs = get_windows_conda_build_record_set()
graph = PrefixGraph(records, specs)
python_node = graph.get_node_by_name('python')
pip_node = graph.get_node_by_name('pip')
assert pip_node in graph.graph[python_node]
assert python_node in graph.graph[pip_node]
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-5::ca-certificates-2017.08.26-h94faf87_0',
'channel-5::conda-env-2.6.0-h36134e3_1',
'channel-5::vs2015_runtime-14.0.25123-3',
'channel-5::vc-14-h0510ff6_3',
'channel-5::openssl-1.0.2n-h74b6da3_0',
'channel-5::yaml-0.1.7-hc54c509_2',
'channel-5::affine-2.1.0-pyh128a3a6_1',
'channel-5::asn1crypto-0.24.0-py36_0',
'channel-5::beautifulsoup4-4.6.0-py36hd4cc5e8_1',
'channel-5::certifi-2018.1.18-py36_0',
'channel-5::chardet-3.0.4-py36h420ce6e_1',
'channel-5::colour-0.1.4-pyhd67b51d_0',
'channel-5::filelock-3.0.4-py36_0',
'channel-5::glob2-0.6-py36hdf76b57_0',
'channel-5::idna-2.6-py36h148d497_1',
'channel-5::markupsafe-1.0-py36h0e26971_1',
'channel-5::pkginfo-1.4.1-py36hb0f9cfa_1',
'channel-5::psutil-5.4.3-py36hfa6e2cd_0',
'channel-5::pycosat-0.6.3-py36h413d8a4_0',
'channel-5::pycparser-2.18-py36hd053e01_1',
'channel-5::cffi-1.11.4-py36hfa6e2cd_0',
'channel-5::python-3.6.4-h6538335_1',
'channel-5::pywin32-222-py36hfa6e2cd_0',
'channel-5::pyyaml-3.12-py36h1d1928f_1',
'channel-5::ruamel_yaml-0.15.35-py36hfa6e2cd_1',
'channel-5::six-1.11.0-py36h4db2310_1',
'channel-5::spiffy-test-app-0.5-pyh6afbcc8_0',
'channel-5::win_inet_pton-1.0.1-py36he67d7fd_1',
'channel-5::wincertstore-0.2-py36h7fe50ca_0',
'channel-5::conda-verify-2.0.0-py36h065de53_0',
'channel-5::cryptography-2.1.4-py36he1d7878_0',
'channel-5::menuinst-1.4.11-py36hfa6e2cd_0',
'channel-5::pysocks-1.6.8-py36_0',
'channel-5::setuptools-38.5.1-py36_0',
'channel-5::uses-spiffy-test-app-2.0-pyh18698f2_0',
'channel-5::jinja2-2.10-py36h292fed1_0',
'channel-5::pyopenssl-17.5.0-py36h5b7d817_0',
'channel-5::wheel-0.30.0-py36h6c3ec14_1',
'channel-5::pip-9.0.1-py36h226ae91_4',
'channel-5::urllib3-1.22-py36h276f60a_0',
'channel-5::requests-2.18.4-py36h4371aae_1',
'channel-5::conda-4.4.11-py36_0',
'channel-5::conda-build-3.5.1-py36_0',
)
assert nodes == order
with env_var('CONDA_ALLOW_CYCLES', 'false', reset_context):
records, specs = get_windows_conda_build_record_set()
with pytest.raises(CyclicalDependencyError):
graph = PrefixGraph(records, specs)
def test_deep_cyclical_dependency():
# Basically, the whole purpose of this test is to make sure nothing blows up with
# recursion errors or anything like that. Cyclical dependencies will always lead to
# problems, and the tests here document the behavior.
# "sqlite-3.20.1-haaaaaaa_4.tar.bz2": {
# "build": "haaaaaaa_4",
# "build_number": 4,
# "depends": [
# "libedit",
# "libgcc-ng >=7.2.0",
# "jinja2 2.9.6"
# ],
# "license": "Public-Domain (http://www.sqlite.org/copyright.html)",
# "md5": "deadbeefdd677bc3ed98ddd4deadbeef",
# "name": "sqlite",
# "sha256": "deadbeefabd915d2f13da177a29e264e59a0ae3c6fd2a31267dcc6a8deadbeef",
# "size": 1540584,
# "subdir": "linux-64",
# "timestamp": 1505666646842,
# "version": "3.20.1"
# },
graph = PrefixGraph(*get_sqlite_cyclical_record_set())
nodes = tuple(rec.dist_str() for rec in graph.records)
print(nodes)
order = (
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::click-6.7-py36h5253387_0',
'channel-4::itsdangerous-0.24-py36h93cc618_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::werkzeug-0.14.1-py36_0',
'channel-4::jinja2-2.9.6-py36h489bce4_1',
'channel-4::flask-0.12.2-py36hb24657c_0',
'channel-4::sqlite-3.20.1-haaaaaaa_4', # deep cyclical dependency; guess this is what we get
)
assert nodes == order
# test remove spec
# because of this deep cyclical dependency, removing jinja2 will remove sqlite and python
expected_removal = (
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::click-6.7-py36h5253387_0',
'channel-4::itsdangerous-0.24-py36h93cc618_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::werkzeug-0.14.1-py36_0',
'channel-4::jinja2-2.9.6-py36h489bce4_1',
'channel-4::flask-0.12.2-py36hb24657c_0',
'channel-4::sqlite-3.20.1-haaaaaaa_4',
)
removed_nodes = graph.remove_spec(MatchSpec("sqlite"))
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == expected_removal
graph = PrefixGraph(*get_sqlite_cyclical_record_set())
removed_nodes = graph.remove_spec(MatchSpec("python"))
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == expected_removal
graph = PrefixGraph(*get_sqlite_cyclical_record_set())
removed_nodes = graph.remove_spec(MatchSpec("jinja2"))
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == expected_removal
graph = PrefixGraph(*get_sqlite_cyclical_record_set())
removed_nodes = graph.remove_spec(MatchSpec("markupsafe"))
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
assert removed_nodes == expected_removal
graph = PrefixGraph(*get_sqlite_cyclical_record_set())
removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
expected_removal = (
'channel-4::flask-0.12.2-py36hb24657c_0',
)
assert removed_nodes == expected_removal
removed_nodes = graph.prune()
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
expected_removal = (
'channel-4::click-6.7-py36h5253387_0',
'channel-4::itsdangerous-0.24-py36h93cc618_1',
'channel-4::werkzeug-0.14.1-py36_0',
)
assert removed_nodes == expected_removal
removed_nodes = graph.remove_youngest_descendant_nodes_with_specs()
removed_nodes = tuple(rec.dist_str() for rec in removed_nodes)
print(removed_nodes)
expected_removal = (
# None, because of the cyclical dependency?
)
assert removed_nodes == expected_removal
graph = PrefixGraph(*get_sqlite_cyclical_record_set())
markupsafe_node = graph.get_node_by_name('markupsafe')
markupsafe_ancestors = graph.all_ancestors(markupsafe_node)
nodes = tuple(rec.dist_str() for rec in markupsafe_ancestors)
print(nodes)
order = (
'channel-4::ca-certificates-2017.08.26-h1d4fec5_0',
'channel-4::libgcc-ng-7.2.0-h7cc24e2_2',
'channel-4::libstdcxx-ng-7.2.0-h7a57d05_2',
'channel-4::libffi-3.2.1-hd88cf55_4',
'channel-4::ncurses-6.0-h9df7e31_2',
'channel-4::openssl-1.0.2n-hb7f436b_0',
'channel-4::tk-8.6.7-hc745277_3',
'channel-4::xz-5.2.3-h55aa19d_2',
'channel-4::zlib-1.2.11-ha838bed_2',
'channel-4::libedit-3.1-heed3624_0',
'channel-4::readline-7.0-ha6073c6_4',
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::jinja2-2.9.6-py36h489bce4_1',
'channel-4::sqlite-3.20.1-haaaaaaa_4',
)
assert nodes == order
markupsafe_descendants = graph.all_descendants(markupsafe_node)
nodes = tuple(rec.dist_str() for rec in markupsafe_descendants)
print(nodes)
order = (
'channel-4::certifi-2018.1.18-py36_0',
'channel-4::click-6.7-py36h5253387_0',
'channel-4::itsdangerous-0.24-py36h93cc618_1',
'channel-4::markupsafe-1.0-py36hd9260cd_1',
'channel-4::python-3.6.4-hc3d631a_1',
'channel-4::setuptools-38.5.1-py36_0',
'channel-4::werkzeug-0.14.1-py36_0',
'channel-4::jinja2-2.9.6-py36h489bce4_1',
'channel-4::flask-0.12.2-py36hb24657c_0',
'channel-4::sqlite-3.20.1-haaaaaaa_4',
)
assert nodes == order
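# A minimal sketch (not conda's PrefixGraph implementation) of why the removals
# above cascade. Assumption: the test data makes sqlite depend on jinja2,
# closing a cycle through python; removing any member of that cycle must also
# remove everything that transitively depends on it. The toy graph and helper
# below are illustrative only.
def _removal_closure(depends_on, root):
    """Return root plus every package that transitively depends on it."""
    dependents = {}  # invert "depends on" edges into "is depended on by"
    for pkg, deps in depends_on.items():
        for dep in deps:
            dependents.setdefault(dep, set()).add(pkg)
    removed, stack = set(), [root]
    while stack:
        node = stack.pop()
        if node not in removed:
            removed.add(node)
            stack.extend(dependents.get(node, ()))
    return removed

_toy = {
    'python': {'sqlite'},
    'sqlite': {'jinja2'},  # the injected deep cyclical dependency
    'markupsafe': {'python'},
    'jinja2': {'markupsafe', 'python'},
    'flask': {'jinja2', 'python'},
}
# sqlite, python, jinja2, and markupsafe all yield the same closure, matching
# the four identical expected_removal assertions above.
assert _removal_closure(_toy, 'sqlite') == {'sqlite', 'python', 'markupsafe', 'jinja2', 'flask'}
assert _removal_closure(_toy, 'markupsafe') == _removal_closure(_toy, 'python')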
| 43.617807
| 169
| 0.628124
| 5,982
| 40,172
| 4.055166
| 0.061518
| 0.144117
| 0.065669
| 0.027331
| 0.906835
| 0.901105
| 0.890758
| 0.889026
| 0.883337
| 0.869981
| 0
| 0.189469
| 0.205267
| 40,172
| 920
| 170
| 43.665217
| 0.57035
| 0.043488
| 0
| 0.899633
| 0
| 0
| 0.55711
| 0.550492
| 0
| 0
| 0
| 0
| 0.041616
| 1
| 0.013464
| false
| 0
| 0.015912
| 0
| 0.034272
| 0.03672
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6dbd898a2710cc3149efcfbb019e8f09be959e40
| 1,779
|
py
|
Python
|
src/unicef_locations/migrations/0006_auto_20190110_2336.py
|
unicef/unicef-locations
|
4cfe80b6a1d8ac35b0fd243063d696ee7a132b3f
|
[
"Apache-2.0"
] | 3
|
2018-07-26T11:11:43.000Z
|
2021-05-11T11:01:09.000Z
|
src/unicef_locations/migrations/0006_auto_20190110_2336.py
|
unicef/unicef-locations
|
4cfe80b6a1d8ac35b0fd243063d696ee7a132b3f
|
[
"Apache-2.0"
] | 9
|
2018-07-26T15:33:41.000Z
|
2022-02-07T11:55:59.000Z
|
src/unicef_locations/migrations/0006_auto_20190110_2336.py
|
unicef/unicef-locations
|
4cfe80b6a1d8ac35b0fd243063d696ee7a132b3f
|
[
"Apache-2.0"
] | 1
|
2018-05-14T18:14:54.000Z
|
2018-05-14T18:14:54.000Z
|
# Generated by Django 2.0.7 on 2019-01-10 23:36
import django.utils.timezone
import model_utils.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('locations', '0005_auto_20181206_1127'),
]
operations = [
migrations.AddField(
model_name='cartodbtable',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now,
editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='cartodbtable',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now,
editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='gatewaytype',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now,
editable=False, verbose_name='created'),
),
migrations.AddField(
model_name='gatewaytype',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now,
editable=False, verbose_name='modified'),
),
migrations.AddField(
model_name='locationremaphistory',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now,
editable=False, verbose_name='modified'),
),
]
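# A minimal sketch of the model-side change that would generate a migration
# like this one (assumption: the models adopt django-model-utils'
# TimeStampedModel, whose created/modified fields are exactly the
# AutoCreatedField/AutoLastModifiedField added above; the extra field shown
# is hypothetical):
#
#     from django.db import models
#     from model_utils.models import TimeStampedModel
#
#     class GatewayType(TimeStampedModel):  # gains created + modified
#         name = models.CharField(max_length=64)
#
# Note that locationremaphistory only receives `modified` here, so its
# `created` field presumably predates this migration.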
| 38.673913
| 100
| 0.547499
| 146
| 1,779
| 6.541096
| 0.30137
| 0.06911
| 0.119372
| 0.141361
| 0.776963
| 0.776963
| 0.700524
| 0.700524
| 0.700524
| 0.700524
| 0
| 0.027145
| 0.358066
| 1,779
| 45
| 101
| 39.533333
| 0.809107
| 0.025295
| 0
| 0.74359
| 1
| 0
| 0.100462
| 0.013279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09b05eb1878a1db509769131ebfd6487fed9d189
| 3,242
|
py
|
Python
|
tests/filters/test_upscale.py
|
mpdude/thumbor
|
21799d4731ed0901dcd85a3025f6836412333e09
|
[
"MIT"
] | 7
|
2019-12-01T07:11:21.000Z
|
2021-11-09T10:09:23.000Z
|
tests/filters/test_upscale.py
|
mpdude/thumbor
|
21799d4731ed0901dcd85a3025f6836412333e09
|
[
"MIT"
] | 3
|
2020-12-02T15:23:46.000Z
|
2021-04-13T15:44:34.000Z
|
tests/filters/test_upscale.py
|
mpdude/thumbor
|
21799d4731ed0901dcd85a3025f6836412333e09
|
[
"MIT"
] | 2
|
2020-03-08T01:30:49.000Z
|
2021-01-10T08:37:40.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
from preggy import expect
from tests.base import FilterTestCase
class UpscaleFilterTestCase(FilterTestCase):
def test_upscale_filter_with_fit_in_big(self):
def config_context(context):
context.request.fit_in = True
context.request.width = 1000
context.request.height = 1000
image = self.get_filtered(
'source.jpg',
'thumbor.filters.upscale',
'upscale()',
config_context=config_context
)
expected = self.get_fixture('upscale1.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.97)
def test_upscale_filter_with_fit_in_small(self):
def config_context(context):
context.request.fit_in = True
context.request.width = 400
context.request.height = 400
image = self.get_filtered(
'source.jpg',
'thumbor.filters.upscale',
'upscale()',
config_context=config_context
)
expected = self.get_fixture('upscale2.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.97)
def test_upscale_filter_with_full_fit_in(self):
def config_context(context):
context.request.fit_in = True
context.request.full = True
context.request.width = 800
context.request.height = 800
image = self.get_filtered(
'source.jpg',
'thumbor.filters.upscale',
'upscale()',
config_context=config_context
)
expected = self.get_fixture('upscale3.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.97)
def test_upscale_filter_with_adaptive_fit_in_big(self):
def config_context(context):
context.request.fit_in = True
context.request.adaptive = True
context.request.width = 1000
context.request.height = 1200
image = self.get_filtered(
'source.jpg',
'thumbor.filters.upscale',
'upscale()',
config_context=config_context
)
expected = self.get_fixture('upscale4.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.97)
def test_upscale_filter_with_adaptive_full_fit_in_big(self):
def config_context(context):
context.request.fit_in = True
context.request.full = True
context.request.adaptive = True
context.request.width = 800
context.request.height = 800
image = self.get_filtered(
'source.jpg',
'thumbor.filters.upscale',
'upscale()',
config_context=config_context
)
expected = self.get_fixture('upscale3.jpg')
ssim = self.get_ssim(image, expected)
expect(ssim).to_be_greater_than(0.97)
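# A minimal sketch (not thumbor's implementation) of the fit-in sizing these
# tests exercise: plain fit-in only ever shrinks, while the upscale() filter
# also permits ratios above 1.0 when the requested box exceeds the source.
def fit_in_scale(src_w, src_h, box_w, box_h, allow_upscale=False):
    ratio = min(box_w / src_w, box_h / src_h)  # preserve aspect ratio
    if not allow_upscale:
        ratio = min(ratio, 1.0)  # fit-in alone never enlarges
    return ratio

assert fit_in_scale(800, 600, 400, 400) == 0.5            # ordinary downscale
assert fit_in_scale(800, 600, 1000, 1000) == 1.0          # without upscale()
assert fit_in_scale(800, 600, 1000, 1000, True) == 1.25   # with upscale()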
| 29.207207
| 64
| 0.610426
| 366
| 3,242
| 5.180328
| 0.213115
| 0.140295
| 0.085443
| 0.052743
| 0.813291
| 0.813291
| 0.813291
| 0.797996
| 0.75211
| 0.75211
| 0
| 0.025708
| 0.292104
| 3,242
| 110
| 65
| 29.472727
| 0.800436
| 0.072486
| 0
| 0.74026
| 0
| 0
| 0.09
| 0.038333
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12987
| false
| 0
| 0.025974
| 0
| 0.168831
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11012f5ec4a8f98ec6ea6ab4f63701e0b6ec34fb
| 50,551
|
py
|
Python
|
tests/integration-tests/components/test_slow.py
|
tum-ei-eda/mlonmcu
|
0d5c114b85f2ae9e48e7d815bfce8df04c2bdb46
|
[
"Apache-2.0"
] | 3
|
2022-03-07T09:38:12.000Z
|
2022-03-24T09:28:36.000Z
|
tests/integration-tests/components/test_slow.py
|
tum-ei-eda/mlonmcu
|
0d5c114b85f2ae9e48e7d815bfce8df04c2bdb46
|
[
"Apache-2.0"
] | 24
|
2022-03-07T16:09:32.000Z
|
2022-03-31T08:08:51.000Z
|
tests/integration-tests/components/test_slow.py
|
tum-ei-eda/mlonmcu
|
0d5c114b85f2ae9e48e7d815bfce8df04c2bdb46
|
[
"Apache-2.0"
] | 1
|
2022-03-07T09:38:17.000Z
|
2022-03-07T09:38:17.000Z
|
from mlonmcu.environment.config import PathConfig
from mlonmcu.session.run import RunStage
from mlonmcu.feature.features import (
get_available_features,
) # This does not really belong here
from mlonmcu.config import resolve_required_config
import pytest
# TODO: add user_session fixture which handles cleanup via session.discard()
def init_features(feature_names, config, context=None):
features = []
for feature_name in feature_names:
available_features = get_available_features(feature_name=feature_name)
for feature_cls in available_features:
required_keys = feature_cls.REQUIRED
if len(required_keys) > 0:
assert context is not None
config.update(
resolve_required_config(
required_keys,
features=features, # The order the features are provided is important here!
config=config,
cache=context.cache,
)
)
feature_inst = feature_cls(config=config)
features.append(feature_inst)
return features
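# Usage sketch for init_features (session/context names as in the tests below):
#
#     features = init_features(["debug", "validate"], config={}, context=user_context)
#     run = session.create_run(features=features, config={})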
# Frontends
DEFAULT_MODELS = [
"sine_model",
] # TODO: make sure that we use quant/float models and several different operators
DEFAULT_FRONTENDS = ["tflite"] # TODO: needs to match with the DEFAULT_MODELS
DEFAULT_BACKENDS = ["tflmi", "tvmaot"]
DEFAULT_PLATFORMS = ["mlif", "espidf"]
# DEFAULT_MLIF_TARGETS = ["host_x86", "etiss_pulpino", "spike", "ovpsim", "corstone300"]
DEFAULT_MLIF_TARGETS = ["host_x86", "etiss_pulpino", "spike", "corstone300"]
DEFAULT_ESPIDF_TARGETS = ["esp32", "esp32c3"]
DEFAULT_TARGETS = DEFAULT_MLIF_TARGETS + DEFAULT_ESPIDF_TARGETS
# VEXT_TARGETS = ["spike", "ovpsim"]
# RISCV_TARGETS = ["spike", "etiss_pulpino", "ovpsim"]
RISCV_TARGETS = ["spike", "etiss_pulpino"]
VEXT_TARGETS = ["spike"]
# DEBUG_ARENA_BACKENDS = ["tflmi", "tvmaot", "tvmrt", "tvmcg"]
DEBUG_ARENA_BACKENDS = ["tflmi", "tvmaot", "tvmrt"]
# TVM_EXAMPLE_CONFIG_COMMON = {}
TVM_EXAMPLE_CONFIG_COMMON = {
"extra_pass_config": {"relay.FuseOps.max_depth": 0}, # TODO
"disabled_passes": ["AlterOpLayout"],
"target_device": "arm_cpu",
"opt_level": 2,
}
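# The tvm backend tests below scope these keys to a single backend by
# prefixing them with the backend name, e.g. "opt_level" -> "tvmaot.opt_level":
#
#     config = {f"{backend_name}.{key}": value for key, value in config.items()}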
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize("config", [{}])
def test_frontend_tflite(user_context, model_name, models_dir, feature_names, config):
frontend_name = "tflite"
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
session.process_runs(until=RunStage.LOAD, context=user_context)
report = session.get_reports()
df = report.df
assert len(df) == 1
assert df["Model"][0] == model_name
assert df["Frontend"][0] == frontend_name
# TODO: check artifacts
# Backends
# TODO: decide if execute on a per-framework basis?
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", ["tflite"])
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config", [{}, {"arena_size": 2**20, "operators": ["TODO"]}] # TODO
) # TODO: user should be able to overwrite settings parsed by frontend
def test_backend_tflmi(user_context, frontend_name, model_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
backend_name = "tflmi"
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context) # TODO: implicit Framework
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Framework"][0] == "tflm" # TODO: rename to tflm
assert df["Backend"][0] == backend_name
# TODO: check artifacts
@pytest.mark.skip("Currently not supported")
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", ["tflite"])
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize("config", [{}])
def test_backend_tflmc(user_context, frontend_name, model_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
backend_name = "tflmc"
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context) # TODO: implicit Framework
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Framework"][0] == "tflm" # TODO: rename to tflm
assert df["Backend"][0] == backend_name
# TODO: check artifacts
@pytest.mark.slow
@pytest.mark.context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", ["tflite"])
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config",
[
{},
{
**TVM_EXAMPLE_CONFIG_COMMON,
"arena_size": 2**20,
"alignment_bytes": 16,
},
],
)
def test_backend_tvmaot(user_context, frontend_name, model_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
backend_name = "tvmaot"
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
config = {f"{backend_name}.{key}": value for key, value in config.items()}
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Framework"][0] == "tvm"
assert df["Backend"][0] == backend_name
# TODO: check artifacts
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", ["tflite"])
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config",
[
{},
{
**TVM_EXAMPLE_CONFIG_COMMON,
"arena_size": 2**20,
},
],
) # TODO: combine tvm common configs
def test_backend_tvmrt(user_context, frontend_name, model_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
backend_name = "tvmrt"
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
user_context.environment.paths["models"] = [PathConfig(models_dir)]
config = {f"{backend_name}.{key}": value for key, value in config.items()}
session = user_context.create_session()
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Framework"][0] == "tvm"
assert df["Backend"][0] == backend_name
# TODO: check artifacts
# TODO: check arena and operators
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", ["tflite"])
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config",
[
{},
{
**TVM_EXAMPLE_CONFIG_COMMON,
"arena_size": 2**20,
},
],
)
def test_backend_tvmcg(user_context, frontend_name, model_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
backend_name = "tvmcg"
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
config = {f"{backend_name}.{key}": value for key, value in config.items()}
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Framework"][0] == "tvm"
assert df["Backend"][0] == backend_name
# TODO: check artifacts
# Platforms(Compile)/Targets(Run)
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS)
@pytest.mark.parametrize("backend_name", DEFAULT_BACKENDS)
@pytest.mark.parametrize("target_name", DEFAULT_MLIF_TARGETS)
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config", [{"tflmi.arena_size": 2**17, "tvmaot.arena_size": 2**17}]
) # corstone300 has limited RAM, TODO: find a better way!
def test_platform_mlif(
user_context, frontend_name, model_name, backend_name, target_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
platform_name = "mlif"
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: remove check?
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.COMPILE, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Platform"][0] == platform_name
assert df["Target"][0] == target_name
# TODO: check artifacts
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS)
@pytest.mark.parametrize("backend_name", DEFAULT_BACKENDS)
@pytest.mark.parametrize("target_name", DEFAULT_MLIF_TARGETS)
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config", [{"tflmi.arena_size": 2**17, "tvmaot.arena_size": 2**17}]
) # corstone300 has limited RAM, TODO: find a better way!
def test_target_mlif(
user_context, frontend_name, model_name, backend_name, target_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
platform_name = "mlif"
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: remove check?
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Platform"][0] == platform_name
assert df["Target"][0] == target_name
# TODO: check artifacts
# TODO: etiss_verbose!
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS)
@pytest.mark.parametrize("backend_name", DEFAULT_BACKENDS)
@pytest.mark.parametrize("target_name", DEFAULT_ESPIDF_TARGETS)
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config", [{"espidf.wait_for_user": False, "tflmi.arena_size": 2**17, "tvmaot.arena_size": 2**17}]
)
def test_platform_espidf(
user_context, frontend_name, model_name, backend_name, target_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
platform_name = "espidf"
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.COMPILE, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Platform"][0] == platform_name
assert df["Target"][0] == target_name
# TODO: check artifacts
@pytest.mark.slow
@pytest.mark.hardware
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS)
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS)
@pytest.mark.parametrize("backend_name", DEFAULT_BACKENDS)
@pytest.mark.parametrize("target_name", ["esp32c3"])
@pytest.mark.parametrize("feature_names", [[]])
@pytest.mark.parametrize(
"config",
[
{
"espidf.wait_for_user": False,
"espidf.use_idf_monitor": False,
"espidf.port": "/dev/ttyUSB0",
"tflmi.arena_size": 2**17, # esp32c3 ram ~300kB
"tvmaot.arena_size": 2**17,
}
],
)
def test_target_espidf(
user_context, frontend_name, model_name, backend_name, target_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
platform_name = "espidf"
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert df["Platform"][0] == platform_name
assert df["Target"][0] == target_name
# TODO: check artifacts
# # PostProcesses
# Features
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: single model would be enough
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS)
@pytest.mark.parametrize("backend_name", DEBUG_ARENA_BACKENDS)
@pytest.mark.parametrize("target_name", ["host_x86"])
@pytest.mark.parametrize("platform_name", ["mlif"])
@pytest.mark.parametrize(
"feature_names", [["debug_arena", "debug"]]
) # TODO: should debug_arena set {target}.print_outputs automatically?
@pytest.mark.parametrize(
"config", [{"host_x86.print_outputs": True}]
) # TODO: get rid of this by writing stdout to an artifact/file
def test_feature_debug_arena(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "debug_arena" in df["Features"][0]
# TODO: check artifacts
# Check generated code
# Check stdout
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tflmi"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["etiss_pulpino"])
@pytest.mark.parametrize("platform_name", ["mlif"]) # TODO: add validate to espidf and test this as well
@pytest.mark.parametrize("feature_names", [["validate", "debug"]]) # currently validate does not imply debug
@pytest.mark.parametrize(
"config", [{"host_x86.print_outputs": True}] # We do not ned this if we just use the report col
)
def test_feature_validate(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "validate" in df["Features"][0]
assert "Validation" in df.columns
assert df["Validation"][0] # if model has validation data else, missing/NaN/None?
# TODO: force correct
# TODO: force mismatch
# TODO: force missing
# TODO: force invalid size
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tflmi"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["host_x86"])
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["debug"]]) # currently validate does not imply debug
@pytest.mark.parametrize(
"config", [{"host_x86.print_outputs": True}] # We do not ned this if we just use the report col
)
def test_feature_debug(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "debug" in df["Features"][0]
# TODO: stdout with test model
# TODO: 2 runs to compare ROM/RAM/Cycles?
# TODO: test with prebuilt ELF?
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tflmi"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", RISCV_TARGETS) # TODO: more targets (without vext)
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize(
"feature_names", [["muriscvnn"], ["muriscvnn", "debug"]]
) # currently validate does not imply debug
@pytest.mark.parametrize("config", [{}])
def test_feature_muriscvnn(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
print("skip", feature)
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "muriscvnn" in df["Features"][0]
# TODO: find out if kernels are actually linked?
# TODO: 2 runs to compare ROM/RAM/Cycles?
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tflmi"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", VEXT_TARGETS) # TODO: any backend would work for scalar code...
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["vext"], ["vext", "muriscvnn"]]) # currently validate does not imply debug
@pytest.mark.parametrize("config", [{"vext.vlen": 128}]) # TODO: add multiple vlens
def test_feature_vext(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "vext" in df["Features"][0]
# TODO: find out if kernels are actually linked?
# TODO: 2 runs to compare ROM/RAM/Cycles?
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tflmi"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["etiss_pulpino"])
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["etissdbg"]]) # This is not etiss_pulpino.verbose=1!!!
@pytest.mark.parametrize("config", [{}])
def test_feature_etissdbg(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.COMPILE, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "etissdbg" in df["Features"][0]
# TODO: run gdb but how?
# TODO: check stdout
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tflmi"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["etiss_pulpino"])
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["trace"]]) # currently validate does not imply debug
@pytest.mark.parametrize("config", [{}])
def test_feature_trace(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
run.add_platform_by_name(platform_name, context=user_context)
run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.RUN, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "trace" in df["Features"][0]
assert "RAM stack" in df.columns
assert "RAM heap" in df.columns
# TODO: check for dyn. memory metrics columns
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tvmaot"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["host_x86"])
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["unpacked_api"]])
@pytest.mark.parametrize("config", [{}])
def test_feature_unpacked_api(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
# run.add_platform_by_name(platform_name, context=user_context)
# run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "unpacked_api" in df["Features"][0]
# TODO: check generated code -> do not run at all (would need to check for metrics changes)
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tvmaot"] # -> add tvm if we have a test model for this
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["host_x86"])
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["usmp"]])
@pytest.mark.parametrize(
"config",
[{"usmp.algorithm": "greedy_by_size"}, {"usmp.algorithm": "greedy_by_conflicts"}, {"usmp.algorithm": "hill_climb"}],
)
def test_feature_usmp(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
# run.add_platform_by_name(platform_name, context=user_context)
# run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "usmp" in df["Features"][0]
# TODO: run twice and compare generated code, or look for specific code
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", DEFAULT_MODELS) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", DEFAULT_FRONTENDS) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tvmaot"] # other tvm backends?
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("target_name", ["host_x86"])
@pytest.mark.parametrize(
"platform_name", ["mlif"]
) # If we renamed host_x86 to linux, we could also use espidf here?
@pytest.mark.parametrize("feature_names", [["disable_legalize"]])
@pytest.mark.parametrize("config", [{}])
def test_feature_disable_legalize(
user_context, frontend_name, model_name, backend_name, target_name, platform_name, models_dir, feature_names, config
):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
if not user_context.environment.has_platform(platform_name):
pytest.skip(f"Platform '{platform_name}' is not enabled.") # TODO: not enabled -> not installed
if not user_context.environment.has_target(target_name):
pytest.skip(f"Target '{target_name}' is not enabled.") # TODO: not enabled -> not installed
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
# run.add_platform_by_name(platform_name, context=user_context)
# run.add_target_by_name(target_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "disable_legalize" in df["Features"][0]
# TODO: run twice and compare codegen results
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", ["sine_model"]) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", ["tflite"]) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tvmaot"] # other tvm backends?
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("feature_names", [["autotune"]])
@pytest.mark.parametrize("config", [{}])
def test_feature_autotune(user_context, frontend_name, model_name, backend_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
success = session.process_runs(until=RunStage.TUNE, context=user_context)
report = session.get_reports()
df = report.df
assert success
assert len(df) == 1
assert "autotune" in df["Features"][0]
@pytest.mark.slow
@pytest.mark.user_context
@pytest.mark.parametrize("model_name", ["sine_model"]) # TODO: add test model for this, also test with wrong data
@pytest.mark.parametrize("frontend_name", ["tflite"]) # Validate is frontend feature as well
@pytest.mark.parametrize(
"backend_name", ["tvmaot"] # other tvm backends?
) # TODO: Single backend would be fine, but it has to be enabled
@pytest.mark.parametrize("feature_names", [["autotune", "autotuned"]]) # TODO: provide tuning records instead
@pytest.mark.parametrize("config", [{"tvmaot.print_outputs": True}])
def test_feature_autotuned(user_context, frontend_name, model_name, backend_name, models_dir, feature_names, config):
if not user_context.environment.has_frontend(frontend_name):
pytest.skip(f"Frontend '{frontend_name}' is not enabled.")
if not user_context.environment.has_backend(backend_name):
pytest.skip(f"Backend '{backend_name}' is not enabled.")
for feature in feature_names:
if not user_context.environment.has_feature(feature):
pytest.skip(f"Feature '{feature}' is not enabled.")
features = init_features(feature_names, config, context=user_context)
user_context.environment.paths["models"] = [PathConfig(models_dir)]
session = user_context.create_session()
run = session.create_run(features=features, config=config)
run.add_frontend_by_name(frontend_name, context=user_context)
run.add_model_by_name(model_name, context=user_context)
run.add_backend_by_name(backend_name, context=user_context)
success = session.process_runs(until=RunStage.BUILD, context=user_context)
report = session.get_reports()
df = report.df
print("artifacts", run.artifacts_per_stage)
assert success
assert len(df) == 1
assert "autotuned" in df["Features"][0]
# TODO:
# cmsisnn -> currently broken
# gdbserver -> hard to test
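# Note: the custom marks used in this file (slow, user_context, context,
# hardware) must be registered with pytest, or collection warns with
# PytestUnknownMarkWarning; a minimal sketch (assumed pytest.ini, descriptions
# are guesses):
#
#     [pytest]
#     markers =
#         slow: long-running integration tests
#         user_context: requires a user environment/context
#         context: requires a context
#         hardware: requires attached hardware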
| 49.60844
| 120
| 0.732765
| 6,837
| 50,551
| 5.185315
| 0.043733
| 0.098048
| 0.080221
| 0.057712
| 0.911063
| 0.906352
| 0.897918
| 0.8905
| 0.887284
| 0.883899
| 0
| 0.003874
| 0.152282
| 50,551
| 1,018
| 121
| 49.657171
| 0.82342
| 0.133113
| 0
| 0.811739
| 0
| 0
| 0.154847
| 0.002543
| 0
| 0
| 0
| 0.000982
| 0.088594
| 1
| 0.025471
| false
| 0.002215
| 0.005537
| 0
| 0.032115
| 0.006645
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
28aa28b5177a467ce389681a3861f4d4854d32e6
| 14,203
|
py
|
Python
|
src/graphql/document/test/parser.py
|
btrekkie/graphql
|
6c118550267eeb57a9653f4f46d7bbd6c5902110
|
[
"MIT"
] | null | null | null |
src/graphql/document/test/parser.py
|
btrekkie/graphql
|
6c118550267eeb57a9653f4f46d7bbd6c5902110
|
[
"MIT"
] | null | null | null |
src/graphql/document/test/parser.py
|
btrekkie/graphql
|
6c118550267eeb57a9653f4f46d7bbd6c5902110
|
[
"MIT"
] | null | null | null |
import unittest
from graphql.document import GraphQlParser
from graphql.document import GraphQlFieldQuery
from graphql.document import GraphQlFragmentReference
from graphql.document import GraphQlParseError
from graphql.schema import GraphQlSchemaFactory
class GraphQlParserTest(unittest.TestCase):
def _schema(self):
"""Return a GraphQlSchema for the "star_wars" module."""
return GraphQlSchemaFactory.create_from_modules([
'graphql.executor.test.star_wars',
'graphql.scalar_descriptors.strict'])
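    # GraphQlParseError is imported above; a minimal sketch of the error path
    # (assumption: parse() raises GraphQlParseError on malformed documents):
    #
    #     with self.assertRaises(GraphQlParseError):
    #         GraphQlParser('{human(id: "1000"', self._schema()).parse()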
def test_valid(self):
"""Test GraphQlParser.parse() on valid documents."""
schema = GraphQlSchemaFactory.create_from_modules(
[
'graphql.executor.test.star_wars',
'graphql.scalar_descriptors.strict'],
'Query')
document = GraphQlParser('{human(id: "1000"){id}}', schema).parse()
self.assertEqual(schema, document.schema)
self.assertEqual(1, len(document.operations))
operation = document.operations[0]
self.assertIsNone(operation.name)
self.assertEqual({}, operation.variables)
self.assertEqual([], operation.directives)
self.assertEqual(
1, len(operation.selection_set.field_queries_and_fragments))
self.assertEqual('Query', operation.selection_set.base_type.name)
human_field_query = (
operation.selection_set.field_queries_and_fragments[0])
self.assertIsInstance(human_field_query, GraphQlFieldQuery)
self.assertEqual('human', human_field_query.response_key)
self.assertEqual('human', human_field_query.field_descriptor.name)
self.assertEqual(['id'], list(human_field_query.args.keys()))
self.assertEqual('1000', human_field_query.args['id'])
self.assertEqual([], human_field_query.directives)
self.assertEqual(
1,
len(human_field_query.selection_set.field_queries_and_fragments))
id_field_query = (
human_field_query.selection_set.field_queries_and_fragments[0])
self.assertIsInstance(id_field_query, GraphQlFieldQuery)
self.assertEqual('id', id_field_query.response_key)
self.assertEqual('id', id_field_query.field_descriptor.name)
self.assertEqual({}, id_field_query.args)
self.assertEqual([], id_field_query.directives)
self.assertIsNone(id_field_query.selection_set)
document = GraphQlParser(
'query foo {human(id: "1000")'
'{bar: id, ... on Human {homePlanet}}}',
schema).parse()
self.assertEqual(schema, document.schema)
self.assertEqual(1, len(document.operations))
operation = document.operations[0]
self.assertEqual('foo', operation.name)
self.assertEqual({}, operation.variables)
self.assertEqual([], operation.directives)
self.assertEqual(
1, len(operation.selection_set.field_queries_and_fragments))
self.assertEqual('Query', operation.selection_set.base_type.name)
human_field_query = (
operation.selection_set.field_queries_and_fragments[0])
self.assertIsInstance(human_field_query, GraphQlFieldQuery)
self.assertEqual('human', human_field_query.response_key)
self.assertEqual('human', human_field_query.field_descriptor.name)
self.assertEqual(['id'], list(human_field_query.args.keys()))
self.assertEqual('1000', human_field_query.args['id'])
self.assertEqual([], human_field_query.directives)
self.assertEqual(
2,
len(human_field_query.selection_set.field_queries_and_fragments))
id_field_query = (
human_field_query.selection_set.field_queries_and_fragments[0])
self.assertIsInstance(id_field_query, GraphQlFieldQuery)
self.assertEqual('bar', id_field_query.response_key)
self.assertEqual('id', id_field_query.field_descriptor.name)
self.assertEqual({}, id_field_query.args)
self.assertEqual([], id_field_query.directives)
self.assertIsNone(id_field_query.selection_set)
fragment_reference = (
human_field_query.selection_set.field_queries_and_fragments[1])
self.assertIsInstance(fragment_reference, GraphQlFragmentReference)
self.assertEqual([], fragment_reference.directives)
fragment = fragment_reference.fragment
self.assertEqual(
1, len(fragment.selection_set.field_queries_and_fragments))
home_planet_field_query = (
fragment.selection_set.field_queries_and_fragments[0])
self.assertIsInstance(home_planet_field_query, GraphQlFieldQuery)
self.assertEqual('homePlanet', home_planet_field_query.response_key)
self.assertEqual(
'homePlanet', home_planet_field_query.field_descriptor.name)
self.assertEqual({}, home_planet_field_query.args)
self.assertEqual([], home_planet_field_query.directives)
self.assertIsNone(home_planet_field_query.selection_set)
def test_no_errors(self):
"""Ensure that GraphQlParser doesn't raise on valid documents."""
schema = self._schema()
# Make sure none of the following raises a GraphQlParseError
GraphQlParser('query {human(id: "1000"){id}}', schema).parse()
GraphQlParser('{human(id: "1000"){__typename}}', schema).parse()
GraphQlParser(
"{human, (,id,:, \"1000\\n\",)\t\n\r\n,{,id,},},",
schema).parse()
GraphQlParser(
'{human(id: "1000"){id, ... on Human {homePlanet}}}',
schema).parse()
GraphQlParser(
'{human(id: "1000"){id, ... on Human {id}}}', schema).parse()
GraphQlParser(
'{human(id: "1000"){id, ... on Character '
'{... on Human {homePlanet}}}}',
schema).parse()
GraphQlParser(
'query ($foo: String!) {human(id: $foo){id, ...HumanFields}} '
'fragment HumanFields on Human {homePlanet}',
schema).parse()
GraphQlParser(
'query ($foo: String!) {...rootFields} '
'fragment rootFields on Query '
'{human(id: $foo){...HumanFields}}'
'fragment HumanFields on Human {homePlanet}',
schema).parse()
GraphQlParser(
'query foo {human(id: "1000"){id}} '
'query bar {human(id: "1001"){id}}',
schema).parse()
GraphQlParser(
'{human(id: "1000"){id}, '
'human(id: "1000"){... on Human{homePlanet}}}',
schema).parse()
GraphQlParser(
'query($foo: Boolean!) '
'{human(id: "1000"){id @include(if: $foo)}}',
schema).parse()
GraphQlParser(
'query($foo: Boolean = true) '
'{human(id: "1000"){id @include(if: $foo)}}',
schema).parse()
GraphQlParser(
"{human(id: \"1000\") # The human\n"
"{id # The ID\n"
'}}',
schema).parse()
GraphQlParser('{human(id: "#1000"){id}}', schema).parse()
def test_invalid_syntax(self):
"""Ensure that GraphQlParser raises on invalid syntax."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser('', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000"){id}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000"){id}}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000"){}}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('query foo() {human(id: "1000"){}}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000"){id{}}}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000") # {id}}}', schema).parse()
def test_invalid_operation_naming(self):
"""Ensure GraphQlParser raises on invalid operation naming."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query foo {human(id: "1000"){id}} '
'query foo {human(id: "1000"){id}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query foo {human(id: "1000"){id}} '
'query {human(id: "1000"){id}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query {human(id: "1000"){id}} '
'query foo {human(id: "1000"){id}}',
schema).parse()
def test_invalid_field_queries(self):
"""Ensure GraphQlParser raises on invalid field queries."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000")'
'{id, ... on Human{... on Character {homePlanet}}}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000"){homePlanet}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){... on Human {homePlanet}, '
'... on Human {homePlanet: date}}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){id}, human(id: "1001"){id}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000")}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human(id: "1000"){id{foo}}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){id{__typename}}}', schema).parse()
def test_invalid_args(self):
"""Ensure that GraphQlParser raises on invalid arguments."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000", foo: "bar"){id}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000", id: "1000"){id}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('{human{id}}', schema).parse()
def test_invalid_fragments(self):
"""Ensure that GraphQlParser raises on invalid fragments."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){...CharacterFields}} '
'fragment CharacterFields on Character {id} '
'fragment CharacterFields on Character {id}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){...CharacterFields}} '
'fragment CharacterFields on DoesNotExist {id}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){id{...idFields}}} '
'fragment idFields on ID {foo}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){id}} '
'fragment CharacterFields on Character {id}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){...CharacterFields}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){...CharacterFields}} '
'fragment CharacterFields on Character {...CharacterFields}',
schema).parse()
def test_invalid_directives(self):
"""Ensure that GraphQlParser raises on invalid directives."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'{human(id: "1000"){id @doesNotExist}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'@include(if: true) {human("123"){id}}', schema).parse()
def test_invalid_variables(self):
"""Ensure that GraphQlParser raises on invalid variables."""
schema = self._schema()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query($foo: String!, $foo: String!) {human($foo){id}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query($foo: String! = 123) {human($foo){id}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query($foo: Boolean! = true) '
'{human(id: "1000"){id @include(if: $foo)}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query($foo: Human) {human($foo){id}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser('query {human(id: $foo){id}}', schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query ($foo: String!) {...rootFields} '
'fragment rootFields on Query '
'{human1: human(id: $foo){id}, human2: human(id: $bar){id}}',
schema).parse()
with self.assertRaises(GraphQlParseError):
GraphQlParser(
'query($foo: ID!) {human(id: $foo){id}}', schema).parse()
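For orientation, a minimal parse-and-inspect sketch assembled from the calls exercised above (same star_wars test schema):

from graphql.document import GraphQlParser
from graphql.schema import GraphQlSchemaFactory

schema = GraphQlSchemaFactory.create_from_modules([
    'graphql.executor.test.star_wars',
    'graphql.scalar_descriptors.strict'])
document = GraphQlParser('{human(id: "1000"){id}}', schema).parse()
operation = document.operations[0]
# Selection sets mix field queries and fragment references.
field_query = operation.selection_set.field_queries_and_fragments[0]
print(field_query.response_key)  # 'human'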
avg_line_length 46.567213 | max_line_length 79 | alphanum_fraction 0.59938 | [qsc_* quality-signal columns]
hexsha e913b0ab4c8be7cced10f3e48dce2a4dd1ff885d | size 245 | ext py | lang Python
path docusign_esign/client/__init__.py | repo pivotal-energy-solutions/docusign-python-client | head f3edd0b82e57999bc8848a63a0477712714ee437 | licenses ["MIT"] | stars 1 (first/last event 2021-04-26T20:52:45.000Z)
from __future__ import absolute_import
# import auth modules into client package
from .auth.oauth import Account
from .auth.oauth import Organization
from .auth.oauth import Link
from .auth.oauth import OAuth
from .auth.oauth import OAuthToken
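Because these names are imported into the client package's __init__, downstream code can pull the OAuth helpers from the package directly; a one-line illustration (the caller is hypothetical):

from docusign_esign.client import OAuth, OAuthToken, Account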
avg_line_length 27.222222 | max_line_length 41 | alphanum_fraction 0.82449 | [qsc_* quality-signal columns]
hexsha e92d312b4afdf9b2c5301906294e7fe36bb2d511 | size 7,967 | ext py | lang Python
path modules/get_aiq_inputs.py | repo schubb2003/aiq | head 8a368ab7a3b07051d04f7f4be4267c86d3fb0d89 | licenses ["MIT"] | stars null | issues null | forks null
#!/usr/bin/python3
"""
# Author: Scott Chubb scott.chubb@netapp.com
# Written for Python 3.7 and above
# No warranty is offered; use at your own risk. While these scripts have been
# tested in lab situations, not every use case can be accounted for.
"""
import argparse
from getpass import getpass
def get_inputs():
"""
Get the inputs for connecting to the cluster
"""
parser = argparse.ArgumentParser()
parser.add_argument('-u', type=str,
required=True,
metavar='user',
help='AIQ username')
parser.add_argument('-p', type=str,
required=False,
metavar='user_pass',
help='AIQ password')
parser.add_argument('--search-customer', type=str.lower,
required=False,
metavar='customer name',
dest='search_customer',
help='customer name to search on')
parser.add_argument('--search-string', type=str.lower,
required=False,
metavar='text to search for',
dest='search_string',
help='log text to search for')
parser.add_argument('--sort-order',
choices=["Cluster", "Date", "Severity", "Details", "Type"],
required=False,
metavar='sort_order',
dest='sort_order',
help='column to sort on for certain script outputs')
parser.add_argument('--node-id', type=int,
required=False,
metavar='node_id',
dest='node_id',
help='node ID required by some scripts')
parser.set_defaults(blank_serial=False)
args = parser.parse_args()
user = args.u
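    # Fall back to an interactive prompt when no password was supplied via -p.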
if not args.p:
user_pass = getpass("Enter password for user {}: ".format(user))
else:
user_pass = args.p
search_customer = args.search_customer
search_string = args.search_string
sort_order = args.sort_order
node_id = args.node_id
return user, user_pass, search_customer, search_string, sort_order, node_id
def get_inputs_inventory():
"""
Get the inputs for connecting to the cluster
"""
parser = argparse.ArgumentParser()
parser.add_argument('-u', type=str,
required=True,
metavar='user',
help='AIQ username')
parser.add_argument('-p', type=str,
required=False,
metavar='user_pass',
help='AIQ password')
parser.add_argument('--version', type=str,
required=False,
metavar='search_vers',
help='Version of Element to search on')
parser.add_argument('--model', type=str.upper,
required=False,
metavar='search_model',
help='Type of node to search on')
parser.add_argument('--search-customer', type=str.lower,
required=False,
metavar='search_customer',
dest='search_customer',
help='Customer to search on')
parser.add_argument('--no-serial',
action='store_true')
parser.set_defaults(blank_serial=False)
args = parser.parse_args()
user = args.u
if not args.p:
user_pass = getpass("Enter password for user {}: ".format(user))
else:
user_pass = args.p
if args.version is not None:
search_vers = args.version
else:
search_vers = None
if args.model is not None:
search_model = args.model
else:
search_model = None
if args.no_serial:
blank_serial = True
else:
blank_serial = False
search_customer = args.search_customer
return user, user_pass, search_vers, search_model, blank_serial, search_customer
def get_inputs_logs():
"""
Get the inputs for connecting to the cluster
"""
parser = argparse.ArgumentParser()
parser.add_argument('-u', type=str,
required=True,
metavar='user',
help='AIQ username')
parser.add_argument('-p', type=str,
required=False,
metavar='user_pass',
help='AIQ password')
parser.add_argument('--search-customer', type=str.lower,
required=False,
metavar='customer name',
dest='search_customer',
help='customer name to search on')
parser.add_argument('--search-string', type=str.lower,
required=False,
metavar='text to search for',
dest='search_string',
help='log text to search for')
parser.add_argument('--sort-order',
choices=["Cluster", "Date", "Severity", "Details", "Type"],
required=False,
metavar='sort_order',
dest='sort_order',
help='column to sort on for certain script outputs')
parser.add_argument('--search-cluster', type=str.lower,
required=False,
metavar='search_cluster',
dest='search_cluster',
help='search for a particular cluster in an output')
parser.set_defaults(blank_serial=False)
args = parser.parse_args()
user = args.u
if not args.p:
user_pass = getpass("Enter password for user {}: ".format(user))
else:
user_pass = args.p
search_customer = args.search_customer
search_string = args.search_string
sort_order = args.sort_order
search_cluster = args.search_cluster
return user, user_pass, search_customer, search_string, sort_order, search_cluster
def get_inputs_disks():
"""
Get the inputs for connecting to the cluster
"""
parser = argparse.ArgumentParser()
parser.add_argument('-u', type=str,
required=True,
metavar='user',
help='AIQ username')
parser.add_argument('-p', type=str,
required=False,
metavar='user_pass',
help='AIQ password')
parser.add_argument('--search-customer', type=str.lower,
required=False,
metavar='customer name',
dest='search_customer',
help='customer name to search on')
parser.add_argument('--search-cluster', type=str.lower,
required=False,
metavar='search_cluster',
dest='search_cluster',
help='search for a particular cluster in an output')
parser.set_defaults(blank_serial=False)
args = parser.parse_args()
user = args.u
if not args.p:
user_pass = getpass("Enter password for user {}: ".format(user))
else:
user_pass = args.p
search_customer = args.search_customer
search_cluster = args.search_cluster
return user, user_pass, search_customer, search_cluster
def main():
"""
Nothing here as this is a module
"""
print(f"This is a support module and has no output of its own")
if __name__ == "__main__":
main()
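A minimal sketch of a calling script (the import path follows the repo layout above; the surrounding script is hypothetical):

from modules.get_aiq_inputs import get_inputs

# Unpack credentials and filter options for a subsequent AIQ API call.
user, user_pass, search_customer, search_string, sort_order, node_id = get_inputs()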
avg_line_length 37.403756 | max_line_length 87 | alphanum_fraction 0.516255 | [qsc_* quality-signal columns]
hexsha 3ada7b3bbb6648a1380173ea089c159cc30d2b2d | size 11,138 | ext py | lang Python
path tests/unit/test_mockings.py | repo hellmage/pacte | head a3b6c2b39b52d6e8c1bb5d0df305e5fc30251fff | licenses ["MIT"] | stars null | issues null | forks null
# Copyright (c) 2017 App Annie Inc. All rights reserved.
import unittest
import requests
from requests_mock import NoMockAddress
from pacte.contract import Contract
from pacte.mockings import MockAPI, MockServices
class TestMockServices(unittest.TestCase):
maxDiff = None
def test_base(self):
contract = Contract("test_provider", "test_consumer")
contract.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body={"key": "value"}
)
with MockServices(MockAPI(contract)) as service:
response = requests.get('{}/path'.format(service.mock_apis[0].get_service_host()))
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.json(), {"key": "value"})
def test_multi_interactions(self):
contract = Contract("test_provider", "test_consumer")
contract.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
headers={"Custom-Header": "value"}, ).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body={"key": "value"})
with MockServices(MockAPI(contract)) as service:
response = requests.get('{}/path'.format(service.mock_apis[0].get_service_host()))
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.json(), {"key": "value"})
contract2 = Contract("test_provider2", "test_consumer")
contract2.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path2",
headers={"Custom-Header": "value"}, ).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body={"key": "value2"})
with MockServices(MockAPI(contract2)) as service2:
response = requests.get('{}/path2'.format(service2.mock_apis[0].get_service_host()))
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.json(), {"key": "value2"})
contract3 = Contract("test_provider2", "test_consumer")
contract3.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path3",
headers={"Custom-Header": "value"}, ).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body={"key": "value3"})
with MockServices(MockAPI(contract3)) as service3:
response = requests.get('{}/path3'.format(service3.mock_apis[0].get_service_host()))
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.json(), {"key": "value3"})
def test_mock_service_functional_json(self):
contract = Contract('provider', 'consumer')
contract.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body={"key": "value"}
)
with MockServices(MockAPI(contract, port=1234)):
response = requests.get('http://localhost:1234/path')
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.json(), {"key": "value"})
def test_mock_service_multi_functional(self):
contract = Contract('provider', 'consumer')
contract.given("Test").upon_receiving("a request").with_request(
method="get",
path="/get_json",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body={"key": "value"}
)
contract.given("Test2").upon_receiving("second request").with_request(
method="get",
path="/get_str",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Content-Type": "text/html"},
body="Test String Response"
)
with MockServices(MockAPI(contract, port=1234)):
response = requests.get('http://localhost:1234/get_json')
self.assertEqual(response.status_code, 200)
self.assertDictEqual(response.json(), {"key": "value"})
response = requests.get('http://localhost:1234/get_str')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.text, "Test String Response")
def test_mock_service_functional_str(self):
contract = Contract('provider', 'consumer')
contract.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Custom-Header": "value"},
body="Test String Response"
)
with MockServices(MockAPI(contract, port=1234)):
response = requests.get('http://localhost:1234/path')
            self.assertEqual(response.status_code, 200)
self.assertEqual(response.text, "Test String Response")
def test_mock_service_get_querystr(self):
contract = Contract('provider', 'consumer')
contract.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
query="date=2017-11-21",
).will_respond_with(
status=200,
headers={"content-type": "application/json", 'X-Request-ID': '9v7uygi2hop'},
body={"key": "value"}
)
with MockServices(MockAPI(contract, port=1234)):
with self.assertRaises(NoMockAddress):
requests.get('http://localhost:1234/path')
with self.assertRaises(NoMockAddress):
requests.get('http://localhost:1234/path?date=2017-11-22')
response = requests.get('http://localhost:1234/path?date=2017-11-21')
self.assertEqual(response.status_code, 200)
self.assertDictEqual({"key": "value"}, response.json())
self.assertEqual('application/json', response.headers['Content-Type'])
self.assertEqual('9v7uygi2hop', response.headers['X-Request-ID'])
def test_mock_service_post_querystr(self):
contract = Contract('provider', 'consumer')
contract.given("Test2").upon_receiving("a request2").with_request(
method="post",
path="/path",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Content-Type": "application/json"},
body={"key": "value"}
)
with MockServices(MockAPI(contract, port=1234)):
with self.assertRaises(NoMockAddress):
requests.get('http://localhost:1234/path')
with self.assertRaises(NoMockAddress):
requests.get('http://localhost:1234/path?date=2017-11-22')
response = requests.post('http://localhost:1234/path', data={"test": "data"})
self.assertEqual(response.status_code, 200)
self.assertDictEqual({"key": "value"}, response.json())
self.assertEqual('application/json', response.headers['Content-Type'])
def test_mock_service_multi_interactions_requests(self):
contract_get = Contract('provider_get', 'consumer')
contract_get.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
query="date=2017-11-21",
).will_respond_with(
status=200,
headers={"content-type": "application/json", 'X-Request-ID': '9v7uygi2hop'},
body={"key": "value"}
)
contract_post = Contract('provider_post', 'consumer')
contract_post.given("Test2").upon_receiving("a request2").with_request(
method="post",
path="/path",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Content-Type": "application/json"},
body={"key": "value"}
)
with MockServices(MockAPI(contract_get, domain='domain_get', port=1234)):
response_get = requests.get('http://domain_get:1234/path?date=2017-11-21')
self.assertEqual(response_get.status_code, 200)
self.assertDictEqual({"key": "value"}, response_get.json())
self.assertEqual('application/json', response_get.headers['Content-Type'])
self.assertEqual('9v7uygi2hop', response_get.headers['X-Request-ID'])
with MockServices(MockAPI(contract_post, domain='domain_post', port=1234)):
response_post = requests.post('http://domain_post:1234/path', data={"test": "data"})
self.assertEqual(response_post.status_code, 200)
self.assertDictEqual({"key": "value"}, response_post.json())
self.assertEqual('application/json', response_post.headers['Content-Type'])
def test_mock_multiple_services(self):
contract_get = Contract('provider_get', 'consumer')
contract_get.given("Test").upon_receiving("a request").with_request(
method="get",
path="/path",
query="date=2017-11-21",
).will_respond_with(
status=200,
headers={"content-type": "application/json", 'X-Request-ID': '9v7uygi2hop'},
body={"key": "value"}
)
mock_get_api = MockAPI(contract_get, domain='domain_get', port=1234)
contract_post = Contract('provider_post', 'consumer')
contract_post.given("Test2").upon_receiving("a request2").with_request(
method="post",
path="/path",
headers={"Custom-Header": "value"},
).will_respond_with(
status=200,
headers={"Content-Type": "application/json"},
body={"key": "value"}
)
mock_post_api = MockAPI(contract_post, domain='domain_post', port=1234)
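        # Registering both mock APIs in one MockServices context lets a
        # single test exercise two mocked hosts (domain_get and domain_post).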
with MockServices([mock_get_api, mock_post_api]):
response_get = requests.get('http://domain_get:1234/path?date=2017-11-21')
self.assertEqual(response_get.status_code, 200)
self.assertDictEqual({"key": "value"}, response_get.json())
self.assertEqual('application/json', response_get.headers['Content-Type'])
self.assertEqual('9v7uygi2hop', response_get.headers['X-Request-ID'])
response_post = requests.post('http://domain_post:1234/path', data={"test": "data"})
self.assertEqual(response_post.status_code, 200)
self.assertDictEqual({"key": "value"}, response_post.json())
self.assertEqual('application/json', response_post.headers['Content-Type'])
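The tests above repeat one fluent pattern; a minimal distilled sketch using the same calls (port 1234 as in the functional tests above):

import requests
from pacte.contract import Contract
from pacte.mockings import MockAPI, MockServices

contract = Contract('provider', 'consumer')
contract.given("Test").upon_receiving("a request").with_request(
    method="get",
    path="/path",
    headers={"Custom-Header": "value"},
).will_respond_with(
    status=200,
    headers={"Content-Type": "application/json"},
    body={"key": "value"},
)
# While the context is active, the contract's interaction is served.
with MockServices(MockAPI(contract, port=1234)):
    assert requests.get('http://localhost:1234/path').json() == {"key": "value"}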
avg_line_length 44.198413 | max_line_length 96 | alphanum_fraction 0.598402 | [qsc_* quality-signal columns]
hexsha 3af7403e10d413e8d55372ccdd6c5463a7eb21c3 | size 145,343 | ext py | lang Python
path OriginalMMArcProModelExport.py | repo MetroSTL/MapMakerETL-Pipeline | head 931403ed3c5b1d7ed3872d122965afad4e2cded7 | licenses ["MIT"] | stars 1 (first/last event 2021-03-17T13:50:06.000Z)
# -*- coding: utf-8 -*-
"""
Generated by ArcGIS ModelBuilder on : 2021-03-11 18:19:18
"""
import arcpy
from sys import argv
# def Model5(Model_Inputs_gdb="C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Inputs.gdb", Model_Outputs_gdb="C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb", Project_Folder="C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605"): # 02b-MapMaker-Preparation
def Convert(Model_Inputs_gdb, Model_Outputs_gdb, Project_Folder): # 02b-MapMaker-Preparation
# To allow overwriting outputs change overwriteOutput option to True.
arcpy.env.overwriteOutput = False
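    # Field_Map below appears to be an ArcGIS field-mappings string: each
    # semicolon-separated entry gives an output field name and alias, its
    # visible/editable/nullable flags, length and type, then the merge rule
    # ("First") and the source table and field it maps from.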
Field_Map = "ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ADDR_TYPE,0,1;CFCC \"CFCC\" true true false 4 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.CFCC,0,4;DIR_TRAVEL \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.DIR_TRAVEL,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.DIRONSIGN,0,1;DIVIDER \"DIVIDER\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.DIVIDER,0,1;Dom \"Dom\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.Dom,-1,-1;ENH_GEOM \"ENH_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ENH_GEOM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.EXITNAME,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.EXPLICATBL,0,1;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FEAT_ID,-1,-1;FR_SPD_LIM \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FR_SPD_LIM,-1,-1;FROM_LANES \"FROM_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FROM_LANES,-1,-1;FUNC_CLASS \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FUNC_CLASS,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.JUNCTIONNM,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_ADDRFORM,0,2;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_ADDRSCH,0,1;L_AREA_ID \"L_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_AREA_ID,-1,-1;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_NREFADDR,0,10;L_NUMZONES \"L_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_NUMZONES,-1,-1;L_POSTCODE \"L_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_POSTCODE,0,11;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_REFADDR,0,10;LANE_CAT \"LANE_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.LANE_CAT,0,1;LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.LINK_ID,-1,-1;N_SHAPEPNT \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.N_SHAPEPNT,-1,-1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NAMEONRDSN,0,1;NREF_IN_ID \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NREF_IN_ID,-1,-1;NREF_ZLEV \"NREF_ZLEV\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NREF_ZLEV,-1,-1;NUM_AD_RNG \"NUM_AD_RNG\" true true 
false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NUM_AD_RNG,-1,-1;OneWay \"OneWay\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.OneWay,0,1;PlaceCodeL \"PlaceCodeL\" true true false 4 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceCodeL,-1,-1;PlaceCodeR \"PlaceCodeR\" true true false 4 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceCodeR,-1,-1;PlaceNamL \"PlaceNamL\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceNamL,0,255;PlaceNamR \"PlaceNamR\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceNamR,0,255;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.POSTALNAME,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_ADDRFORM,0,2;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_ADDRSCH,0,1;R_AREA_ID \"R_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_AREA_ID,-1,-1;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_NREFADDR,0,10;R_NUMZONES \"R_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_NUMZONES,-1,-1;R_POSTCODE \"R_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_POSTCODE,0,11;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_REFADDR,0,10;RAMP \"RAMP\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.RAMP,0,1;REF_IN_ID \"REF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.REF_IN_ID,-1,-1;REF_ZLEV \"REF_ZLEV\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.REF_ZLEV,-1,-1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ROUTE_TYPE,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.SCENIC_NM,0,1;Speed \"Speed\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.Speed,-1,-1;SPEED_CAT \"SPEED_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.SPEED_CAT,0,1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_LANGCD,0,3;ST_NAME \"ST_NAME\" true true false 240 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NAME,0,240;ST_NM_BASE \"ST_NM_BASE\" true true false 105 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NM_BASE,0,105;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NM_PREF,0,6;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 90 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_TYP_AFT,0,90;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 
0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_TYP_ATT,0,1;ST_TYP_BEF \"ST_TYP_BEF\" true true false 90 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_TYP_BEF,0,90;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.STALENAME,0,1;StateAbbrL \"StateAbbrL\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateAbbrL,0,2;StateAbbrR \"StateAbbrR\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateAbbrR,0,2;StateCodeL \"StateCodeL\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateCodeL,-1,-1;StateCodeR \"StateCodeR\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateCodeR,-1,-1;TO_LANES \"TO_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.TO_LANES,-1,-1;TO_SPD_LIM \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.TO_SPD_LIM,-1,-1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.VANITYNAME,0,1;LINK_ID_1 \"LINK_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.LINK_ID,-1,-1;ST_NAME_1 \"ST_NAME\" true true false 120 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NAME,0,120;FEAT_ID_1 \"FEAT_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FEAT_ID,-1,-1;ST_LANGCD_1 \"ST_LANGCD\" true true false 3 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_LANGCD,0,3;NUM_STNMES \"NUM_STNMES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NUM_STNMES,-1,-1;ST_NM_PREF_1 \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NM_PREF,0,6;ST_TYP_BEF_1 \"ST_TYP_BEF\" true true false 50 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_TYP_BEF,0,50;ST_NM_BASE_1 \"ST_NM_BASE\" true true false 70 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NM_BASE,0,70;ST_NM_SUFF_1 \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NM_SUFF,0,6;ST_TYP_AFT_1 \"ST_TYP_AFT\" true true false 50 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_TYP_AFT,0,50;ST_TYP_ATT_1 \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_TYP_ATT,0,1;ADDR_TYPE_1 \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ADDR_TYPE,0,1;L_REFADDR_1 \"L_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_REFADDR,0,10;L_NREFADDR_1 \"L_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_NREFADDR,0,10;L_ADDRSCH_1 \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_ADDRSCH,0,1;L_ADDRFORM_1 \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_ADDRFORM,0,2;R_REFADDR_1 \"R_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_REFADDR,0,10;R_NREFADDR_1 \"R_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_NREFADDR,0,10;R_ADDRSCH_1 \"R_ADDRSCH\" true true false 1 Text 0 
0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_ADDRSCH,0,1;R_ADDRFORM_1 \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_ADDRFORM,0,2;REF_IN_ID_1 \"REF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.REF_IN_ID,-1,-1;NREF_IN_ID_1 \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NREF_IN_ID,-1,-1;N_SHAPEPNT_1 \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.N_SHAPEPNT,-1,-1;FUNC_CLASS_1 \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FUNC_CLASS,0,1;SPEED_CAT_1 \"SPEED_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SPEED_CAT,0,1;FR_SPD_LIM_1 \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FR_SPD_LIM,-1,-1;TO_SPD_LIM_1 \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TO_SPD_LIM,-1,-1;TO_LANES_1 \"TO_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TO_LANES,-1,-1;FROM_LANES_1 \"FROM_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FROM_LANES,-1,-1;ENH_GEOM_1 \"ENH_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ENH_GEOM,0,1;LANE_CAT_1 \"LANE_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.LANE_CAT,0,1;DIVIDER_1 \"DIVIDER\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIVIDER,0,1;DIR_TRAVEL_1 \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIR_TRAVEL,0,1;L_AREA_ID_1 \"L_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_AREA_ID,-1,-1;R_AREA_ID_1 \"R_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_AREA_ID,-1,-1;L_POSTCODE_1 \"L_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_POSTCODE,0,11;R_POSTCODE_1 \"R_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_POSTCODE,0,11;L_NUMZONES_1 \"L_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_NUMZONES,-1,-1;R_NUMZONES_1 \"R_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_NUMZONES,-1,-1;NUM_AD_RNG_1 \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NUM_AD_RNG,-1,-1;AR_AUTO \"AR_AUTO\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_AUTO,0,1;AR_BUS \"AR_BUS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_BUS,0,1;AR_TAXIS \"AR_TAXIS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_TAXIS,0,1;AR_CARPOOL \"AR_CARPOOL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_CARPOOL,0,1;AR_PEDEST \"AR_PEDEST\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_PEDEST,0,1;AR_TRUCKS \"AR_TRUCKS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_TRUCKS,0,1;AR_TRAFF \"AR_TRAFF\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_TRAFF,0,1;AR_DELIV \"AR_DELIV\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_DELIV,0,1;AR_EMERVEH \"AR_EMERVEH\" 
true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_EMERVEH,0,1;AR_MOTOR \"AR_MOTOR\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_MOTOR,0,1;PAVED \"PAVED\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PAVED,0,1;PRIVATE \"PRIVATE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PRIVATE,0,1;FRONTAGE \"FRONTAGE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FRONTAGE,0,1;BRIDGE \"BRIDGE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.BRIDGE,0,1;TUNNEL \"TUNNEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TUNNEL,0,1;RAMP_1 \"RAMP\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.RAMP,0,1;TOLLWAY \"TOLLWAY\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TOLLWAY,0,1;POIACCESS \"POIACCESS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.POIACCESS,0,1;CONTRACC \"CONTRACC\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.CONTRACC,0,1;ROUNDABOUT \"ROUNDABOUT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ROUNDABOUT,0,1;INTERINTER \"INTERINTER\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.INTERINTER,0,1;UNDEFTRAFF \"UNDEFTRAFF\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.UNDEFTRAFF,0,1;FERRY_TYPE \"FERRY_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FERRY_TYPE,0,1;MULTIDIGIT \"MULTIDIGIT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.MULTIDIGIT,0,1;MAXATTR \"MAXATTR\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.MAXATTR,0,1;SPECTRFIG \"SPECTRFIG\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SPECTRFIG,0,1;INDESCRIB \"INDESCRIB\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.INDESCRIB,0,1;MANOEUVRE \"MANOEUVRE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.MANOEUVRE,0,1;DIVIDERLEG \"DIVIDERLEG\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIVIDERLEG,0,1;INPROCDATA \"INPROCDATA\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.INPROCDATA,0,1;FULL_GEOM \"FULL_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FULL_GEOM,0,1;URBAN \"URBAN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.URBAN,0,1;ROUTE_TYPE_1 \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ROUTE_TYPE,0,1;DIRONSIGN_1 \"DIRONSIGN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIRONSIGN,0,1;EXPLICATBL_1 \"EXPLICATBL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXPLICATBL,0,1;NAMEONRDSN_1 \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NAMEONRDSN,0,1;POSTALNAME_1 \"POSTALNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.POSTALNAME,0,1;STALENAME_1 \"STALENAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.STALENAME,0,1;VANITYNAME_1 \"VANITYNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.VANITYNAME,0,1;JUNCTIONNM_1 
\"JUNCTIONNM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.JUNCTIONNM,0,1;EXITNAME_1 \"EXITNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXITNAME,0,1;SCENIC_RT \"SCENIC_RT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SCENIC_RT,0,1;SCENIC_NM_1 \"SCENIC_NM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SCENIC_NM,0,1;FOURWHLDR \"FOURWHLDR\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FOURWHLDR,0,1;COVERIND \"COVERIND\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.COVERIND,0,2;PLOT_ROAD \"PLOT_ROAD\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PLOT_ROAD,0,1;REVERSIBLE \"REVERSIBLE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.REVERSIBLE,0,1;EXPR_LANE \"EXPR_LANE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXPR_LANE,0,1;CARPOOLRD \"CARPOOLRD\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.CARPOOLRD,0,1;PHYS_LANES \"PHYS_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PHYS_LANES,-1,-1;VER_TRANS \"VER_TRANS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.VER_TRANS,0,1;PUB_ACCESS \"PUB_ACCESS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PUB_ACCESS,0,1;LOW_MBLTY \"LOW_MBLTY\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.LOW_MBLTY,0,1;PRIORITYRD \"PRIORITYRD\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PRIORITYRD,0,1;SPD_LM_SRC \"SPD_LM_SRC\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SPD_LM_SRC,0,2;EXPAND_INC \"EXPAND_INC\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXPAND_INC,0,1;TRANS_AREA \"TRANS_AREA\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TRANS_AREA,0,1;REF_ZLEV_1 \"REF_ZLEV\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.REF_ZLEV,-1,-1;NREF_ZLEV_1 \"NREF_ZLEV\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NREF_ZLEV,-1,-1;PlaceCodeL_1 \"PlaceCodeL\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceCodeL,-1,-1;PlaceCodeR_1 \"PlaceCodeR\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceCodeR,-1,-1;PlaceNamL_1 \"PlaceNamL\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceNamL,0,255;PlaceNamR_1 \"PlaceNamR\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceNamR,0,255;StateCodeL_1 \"StateCodeL\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateCodeL,-1,-1;StateCodeR_1 \"StateCodeR\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateCodeR,-1,-1;StateAbbrL_1 \"StateAbbrL\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateAbbrL,0,255;StateAbbrR_1 \"StateAbbrR\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateAbbrR,0,255;OneWay_1 \"OneWay\" true true false 0 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.OneWay,-1,-1;Speed_1 \"Speed\" true true false 0 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.Speed,-1,-1;CFCC_1 
\"CFCC\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.CFCC,0,255;M_LINK_ID \"M_LINK_ID\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.M_LINK_ID,-1,-1;OLD_LINK_ID \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.OLD_LINK_ID,-1,-1"
Field_Map_2_ = "ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ADDR_TYPE,0,1;CFCC \"CFCC\" true true false 4 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.CFCC,0,4;DIR_TRAVEL \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.DIR_TRAVEL,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.DIRONSIGN,0,1;DIVIDER \"DIVIDER\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.DIVIDER,0,1;Dom \"Dom\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.Dom,-1,-1;ENH_GEOM \"ENH_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ENH_GEOM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.EXITNAME,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.EXPLICATBL,0,1;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FEAT_ID,-1,-1;FR_SPD_LIM \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FR_SPD_LIM,-1,-1;FROM_LANES \"FROM_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FROM_LANES,-1,-1;FUNC_CLASS \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.FUNC_CLASS,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.JUNCTIONNM,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_ADDRFORM,0,2;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_ADDRSCH,0,1;L_AREA_ID \"L_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_AREA_ID,-1,-1;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_NREFADDR,0,10;L_NUMZONES \"L_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_NUMZONES,-1,-1;L_POSTCODE \"L_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_POSTCODE,0,11;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.L_REFADDR,0,10;LANE_CAT \"LANE_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.LANE_CAT,0,1;LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.LINK_ID,-1,-1;N_SHAPEPNT \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.N_SHAPEPNT,-1,-1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NAMEONRDSN,0,1;NREF_IN_ID \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NREF_IN_ID,-1,-1;NREF_ZLEV \"NREF_ZLEV\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NREF_ZLEV,-1,-1;NUM_AD_RNG \"NUM_AD_RNG\" true 
true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.NUM_AD_RNG,-1,-1;OneWay \"OneWay\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.OneWay,0,1;PlaceCodeL \"PlaceCodeL\" true true false 4 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceCodeL,-1,-1;PlaceCodeR \"PlaceCodeR\" true true false 4 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceCodeR,-1,-1;PlaceNamL \"PlaceNamL\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceNamL,0,255;PlaceNamR \"PlaceNamR\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.PlaceNamR,0,255;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.POSTALNAME,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_ADDRFORM,0,2;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_ADDRSCH,0,1;R_AREA_ID \"R_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_AREA_ID,-1,-1;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_NREFADDR,0,10;R_NUMZONES \"R_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_NUMZONES,-1,-1;R_POSTCODE \"R_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_POSTCODE,0,11;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.R_REFADDR,0,10;RAMP \"RAMP\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.RAMP,0,1;REF_IN_ID \"REF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.REF_IN_ID,-1,-1;REF_ZLEV \"REF_ZLEV\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.REF_ZLEV,-1,-1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ROUTE_TYPE,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.SCENIC_NM,0,1;Speed \"Speed\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.Speed,-1,-1;SPEED_CAT \"SPEED_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.SPEED_CAT,0,1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_LANGCD,0,3;ST_NAME \"ST_NAME\" true true false 240 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NAME,0,240;ST_NM_BASE \"ST_NM_BASE\" true true false 105 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NM_BASE,0,105;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NM_PREF,0,6;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 90 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_TYP_AFT,0,90;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 
0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_TYP_ATT,0,1;ST_TYP_BEF \"ST_TYP_BEF\" true true false 90 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.ST_TYP_BEF,0,90;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.STALENAME,0,1;StateAbbrL \"StateAbbrL\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateAbbrL,0,2;StateAbbrR \"StateAbbrR\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateAbbrR,0,2;StateCodeL \"StateCodeL\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateCodeL,-1,-1;StateCodeR \"StateCodeR\" true true false 2 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.StateCodeR,-1,-1;TO_LANES \"TO_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.TO_LANES,-1,-1;TO_SPD_LIM \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.TO_SPD_LIM,-1,-1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,AltStreets_Layer_Dissolve1.VANITYNAME,0,1;LINK_ID_1 \"LINK_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.LINK_ID,-1,-1;ST_NAME_1 \"ST_NAME\" true true false 120 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NAME,0,120;FEAT_ID_1 \"FEAT_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FEAT_ID,-1,-1;ST_LANGCD_1 \"ST_LANGCD\" true true false 3 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_LANGCD,0,3;NUM_STNMES \"NUM_STNMES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NUM_STNMES,-1,-1;ST_NM_PREF_1 \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NM_PREF,0,6;ST_TYP_BEF_1 \"ST_TYP_BEF\" true true false 50 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_TYP_BEF,0,50;ST_NM_BASE_1 \"ST_NM_BASE\" true true false 70 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NM_BASE,0,70;ST_NM_SUFF_1 \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_NM_SUFF,0,6;ST_TYP_AFT_1 \"ST_TYP_AFT\" true true false 50 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_TYP_AFT,0,50;ST_TYP_ATT_1 \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ST_TYP_ATT,0,1;ADDR_TYPE_1 \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ADDR_TYPE,0,1;L_REFADDR_1 \"L_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_REFADDR,0,10;L_NREFADDR_1 \"L_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_NREFADDR,0,10;L_ADDRSCH_1 \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_ADDRSCH,0,1;L_ADDRFORM_1 \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_ADDRFORM,0,2;R_REFADDR_1 \"R_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_REFADDR,0,10;R_NREFADDR_1 \"R_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_NREFADDR,0,10;R_ADDRSCH_1 \"R_ADDRSCH\" true true false 1 Text 0 
0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_ADDRSCH,0,1;R_ADDRFORM_1 \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_ADDRFORM,0,2;REF_IN_ID_1 \"REF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.REF_IN_ID,-1,-1;NREF_IN_ID_1 \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NREF_IN_ID,-1,-1;N_SHAPEPNT_1 \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.N_SHAPEPNT,-1,-1;FUNC_CLASS_1 \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FUNC_CLASS,0,1;SPEED_CAT_1 \"SPEED_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SPEED_CAT,0,1;FR_SPD_LIM_1 \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FR_SPD_LIM,-1,-1;TO_SPD_LIM_1 \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TO_SPD_LIM,-1,-1;TO_LANES_1 \"TO_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TO_LANES,-1,-1;FROM_LANES_1 \"FROM_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FROM_LANES,-1,-1;ENH_GEOM_1 \"ENH_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ENH_GEOM,0,1;LANE_CAT_1 \"LANE_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.LANE_CAT,0,1;DIVIDER_1 \"DIVIDER\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIVIDER,0,1;DIR_TRAVEL_1 \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIR_TRAVEL,0,1;L_AREA_ID_1 \"L_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_AREA_ID,-1,-1;R_AREA_ID_1 \"R_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_AREA_ID,-1,-1;L_POSTCODE_1 \"L_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_POSTCODE,0,11;R_POSTCODE_1 \"R_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_POSTCODE,0,11;L_NUMZONES_1 \"L_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.L_NUMZONES,-1,-1;R_NUMZONES_1 \"R_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.R_NUMZONES,-1,-1;NUM_AD_RNG_1 \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NUM_AD_RNG,-1,-1;AR_AUTO \"AR_AUTO\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_AUTO,0,1;AR_BUS \"AR_BUS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_BUS,0,1;AR_TAXIS \"AR_TAXIS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_TAXIS,0,1;AR_CARPOOL \"AR_CARPOOL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_CARPOOL,0,1;AR_PEDEST \"AR_PEDEST\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_PEDEST,0,1;AR_TRUCKS \"AR_TRUCKS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_TRUCKS,0,1;AR_TRAFF \"AR_TRAFF\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_TRAFF,0,1;AR_DELIV \"AR_DELIV\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_DELIV,0,1;AR_EMERVEH \"AR_EMERVEH\" 
true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_EMERVEH,0,1;AR_MOTOR \"AR_MOTOR\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.AR_MOTOR,0,1;PAVED \"PAVED\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PAVED,0,1;PRIVATE \"PRIVATE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PRIVATE,0,1;FRONTAGE \"FRONTAGE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FRONTAGE,0,1;BRIDGE \"BRIDGE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.BRIDGE,0,1;TUNNEL \"TUNNEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TUNNEL,0,1;RAMP_1 \"RAMP\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.RAMP,0,1;TOLLWAY \"TOLLWAY\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TOLLWAY,0,1;POIACCESS \"POIACCESS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.POIACCESS,0,1;CONTRACC \"CONTRACC\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.CONTRACC,0,1;ROUNDABOUT \"ROUNDABOUT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ROUNDABOUT,0,1;INTERINTER \"INTERINTER\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.INTERINTER,0,1;UNDEFTRAFF \"UNDEFTRAFF\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.UNDEFTRAFF,0,1;FERRY_TYPE \"FERRY_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FERRY_TYPE,0,1;MULTIDIGIT \"MULTIDIGIT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.MULTIDIGIT,0,1;MAXATTR \"MAXATTR\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.MAXATTR,0,1;SPECTRFIG \"SPECTRFIG\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SPECTRFIG,0,1;INDESCRIB \"INDESCRIB\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.INDESCRIB,0,1;MANOEUVRE \"MANOEUVRE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.MANOEUVRE,0,1;DIVIDERLEG \"DIVIDERLEG\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIVIDERLEG,0,1;INPROCDATA \"INPROCDATA\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.INPROCDATA,0,1;FULL_GEOM \"FULL_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FULL_GEOM,0,1;URBAN \"URBAN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.URBAN,0,1;ROUTE_TYPE_1 \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.ROUTE_TYPE,0,1;DIRONSIGN_1 \"DIRONSIGN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.DIRONSIGN,0,1;EXPLICATBL_1 \"EXPLICATBL\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXPLICATBL,0,1;NAMEONRDSN_1 \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NAMEONRDSN,0,1;POSTALNAME_1 \"POSTALNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.POSTALNAME,0,1;STALENAME_1 \"STALENAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.STALENAME,0,1;VANITYNAME_1 \"VANITYNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.VANITYNAME,0,1;JUNCTIONNM_1 
\"JUNCTIONNM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.JUNCTIONNM,0,1;EXITNAME_1 \"EXITNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXITNAME,0,1;SCENIC_RT \"SCENIC_RT\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SCENIC_RT,0,1;SCENIC_NM_1 \"SCENIC_NM\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SCENIC_NM,0,1;FOURWHLDR \"FOURWHLDR\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.FOURWHLDR,0,1;COVERIND \"COVERIND\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.COVERIND,0,2;PLOT_ROAD \"PLOT_ROAD\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PLOT_ROAD,0,1;REVERSIBLE \"REVERSIBLE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.REVERSIBLE,0,1;EXPR_LANE \"EXPR_LANE\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXPR_LANE,0,1;CARPOOLRD \"CARPOOLRD\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.CARPOOLRD,0,1;PHYS_LANES \"PHYS_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PHYS_LANES,-1,-1;VER_TRANS \"VER_TRANS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.VER_TRANS,0,1;PUB_ACCESS \"PUB_ACCESS\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PUB_ACCESS,0,1;LOW_MBLTY \"LOW_MBLTY\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.LOW_MBLTY,0,1;PRIORITYRD \"PRIORITYRD\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PRIORITYRD,0,1;SPD_LM_SRC \"SPD_LM_SRC\" true true false 2 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.SPD_LM_SRC,0,2;EXPAND_INC \"EXPAND_INC\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.EXPAND_INC,0,1;TRANS_AREA \"TRANS_AREA\" true true false 1 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.TRANS_AREA,0,1;REF_ZLEV_1 \"REF_ZLEV\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.REF_ZLEV,-1,-1;NREF_ZLEV_1 \"NREF_ZLEV\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.NREF_ZLEV,-1,-1;PlaceCodeL_1 \"PlaceCodeL\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceCodeL,-1,-1;PlaceCodeR_1 \"PlaceCodeR\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceCodeR,-1,-1;PlaceNamL_1 \"PlaceNamL\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceNamL,0,255;PlaceNamR_1 \"PlaceNamR\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.PlaceNamR,0,255;StateCodeL_1 \"StateCodeL\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateCodeL,-1,-1;StateCodeR_1 \"StateCodeR\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateCodeR,-1,-1;StateAbbrL_1 \"StateAbbrL\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateAbbrL,0,255;StateAbbrR_1 \"StateAbbrR\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.StateAbbrR,0,255;OneWay_1 \"OneWay\" true true false 0 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.OneWay,-1,-1;Speed_1 \"Speed\" true true false 0 Short 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.Speed,-1,-1;CFCC_1 
\"CFCC\" true true false 255 Text 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.CFCC,0,255;M_LINK_ID \"M_LINK_ID\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.M_LINK_ID,-1,-1;OLD_LINK_ID \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,AltStreets_Layer_Dissolve1_L,Streets_table.OLD_LINK_ID,-1,-1"
# Process: Select_Data_7_ (Select Data)
# Select Data Utility is not implemented
# Process: Simplify Line (2) (Simplify Line)
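# POINT_REMOVE keeps critical vertices and drops the rest (Douglas-Peucker
# style) within the 5-foot tolerance; transferGDBAttributeProperties=False
# skips copying geodatabase field properties (domains/defaults) to the output.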
AltStreets_4_ = fr"{Model_Outputs_gdb}\AltStreets"
with arcpy.EnvManager(transferGDBAttributeProperties=False):
    AltStreets_Pnt = arcpy.SimplifyLine_cartography(in_features=AltStreets_3_, out_feature_class=AltStreets_4_, algorithm="POINT_REMOVE", tolerance="5 Feet", error_resolving_option="RESOLVE_ERRORS", collapsed_point_option="KEEP_COLLAPSED_POINTS", error_checking_option="CHECK", in_barriers=[])[0]
# Process: Add Fields (4) (Add Fields (multiple))
AltStreets = arcpy.AddFields_management(in_table=AltStreets_4_, field_description=[["REF_ZLEV", "SHORT", "", "", "", ""], ["DOM", "LONG", "", "", "", ""]])[0]
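# REF_ZLEV and DOM are added so the z-level and dominant-name values computed
# below have fields to land in; geodatabase field names are case-insensitive,
# so the later calculation on "Dom" writes to this DOM field.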
# Process: Make Feature Layer (5) (Make Feature Layer)
AltStreets_Layer_5_ = "AltStreets_Layer"
arcpy.MakeFeatureLayer_management(in_features=AltStreets, out_layer=AltStreets_Layer_5_, where_clause="", workspace=Model_Outputs_gdb, field_info="OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;LINK_ID LINK_ID VISIBLE NONE;ST_NAME ST_NAME VISIBLE NONE;FEAT_ID FEAT_ID VISIBLE NONE;ST_LANGCD ST_LANGCD VISIBLE NONE;ST_NM_PREF ST_NM_PREF VISIBLE NONE;ST_TYP_BEF ST_TYP_BEF VISIBLE NONE;ST_NM_BASE ST_NM_BASE VISIBLE NONE;ST_NM_SUFF ST_NM_SUFF VISIBLE NONE;ST_TYP_AFT ST_TYP_AFT VISIBLE NONE;ST_TYP_ATT ST_TYP_ATT VISIBLE NONE;ADDR_TYPE ADDR_TYPE VISIBLE NONE;L_REFADDR L_REFADDR VISIBLE NONE;L_NREFADDR L_NREFADDR VISIBLE NONE;L_ADDRSCH L_ADDRSCH VISIBLE NONE;L_ADDRFORM L_ADDRFORM VISIBLE NONE;R_REFADDR R_REFADDR VISIBLE NONE;R_NREFADDR R_NREFADDR VISIBLE NONE;R_ADDRSCH R_ADDRSCH VISIBLE NONE;R_ADDRFORM R_ADDRFORM VISIBLE NONE;NUM_AD_RNG NUM_AD_RNG VISIBLE NONE;ROUTE_TYPE ROUTE_TYPE VISIBLE NONE;DIRONSIGN DIRONSIGN VISIBLE NONE;EXPLICATBL EXPLICATBL VISIBLE NONE;NAMEONRDSN NAMEONRDSN VISIBLE NONE;POSTALNAME POSTALNAME VISIBLE NONE;STALENAME STALENAME VISIBLE NONE;VANITYNAME VANITYNAME VISIBLE NONE;JUNCTIONNM JUNCTIONNM VISIBLE NONE;EXITNAME EXITNAME VISIBLE NONE;SCENIC_NM SCENIC_NM VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;InLine_FID InLine_FID VISIBLE NONE;SimLnFlag SimLnFlag VISIBLE NONE;MaxSimpTol MaxSimpTol VISIBLE NONE;MinSimpTol MinSimpTol VISIBLE NONE;REF_ZLEV REF_ZLEV VISIBLE NONE;DOM DOM VISIBLE NONE")
# Process: Add Attribute Index (Add Attribute Index)
AltStreets_Layer_8_ = arcpy.AddIndex_management(in_table=AltStreets_Layer_5_, fields=["LINK_ID"], index_name="LINK_ID", unique="NON_UNIQUE", ascending="ASCENDING")[0]
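# A non-unique attribute index on LINK_ID speeds up the LINK_ID joins below.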
# Process: Select_Data_5_ (Select Data)
# Select Data Utility is not implemented
# Process: Simplify Line (Simplify Line)
Streets = fr"{Model_Outputs_gdb}\Streets"
with arcpy.EnvManager(transferGDBAttributeProperties=False):
    Streets_Pnt = arcpy.SimplifyLine_cartography(in_features=Streets_3_, out_feature_class=Streets, algorithm="POINT_REMOVE", tolerance="5 Feet", error_resolving_option="RESOLVE_ERRORS", collapsed_point_option="KEEP_COLLAPSED_POINTS", error_checking_option="CHECK", in_barriers=[])[0]
# Process: Add Fields (3) (Add Fields (multiple))
Streets_4_ = arcpy.AddFields_management(in_table=Streets, field_description=[["REF_ZLEV", "LONG", "", "", "", ""], ["NREF_ZLEV", "LONG", "", "", "", ""], ["PlaceCodeL", "LONG", "", "", "", ""], ["PlaceCodeR", "LONG", "", "", "", ""], ["PlaceNamL", "TEXT", "", "255", "", ""], ["PlaceNamR", "TEXT", "", "255", "", ""], ["StateCodeL", "LONG", "", "", "", ""], ["StateCodeR", "LONG", "", "", "", ""], ["StateAbbrL", "TEXT", "", "255", "", ""], ["StateAbbrR", "TEXT", "", "255", "", ""], ["OneWay", "SHORT", "", "", "", ""], ["Speed", "LONG", "", "", "", ""], ["CFCC", "TEXT", "", "255", "", ""], ["M_LINK_ID", "LONG", "", "", "", ""], ["OLD_LINK_ID", "LONG", "", "", "", ""]])[0]
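# These 15 fields are the conversion targets populated by the join and
# Calculate Field steps that follow (place/state codes, OneWay, Speed, CFCC,
# and the preserved link IDs).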
# Process: Make Feature Layer (4) (Make Feature Layer)
Streets_Layer_3_ = "Streets_Layer"
arcpy.MakeFeatureLayer_management(in_features=Streets_4_, out_layer=Streets_Layer_3_, where_clause="", workspace=Model_Outputs_gdb, field_info="OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;LINK_ID LINK_ID VISIBLE NONE;ST_NAME ST_NAME VISIBLE NONE;FEAT_ID FEAT_ID VISIBLE NONE;ST_LANGCD ST_LANGCD VISIBLE NONE;NUM_STNMES NUM_STNMES VISIBLE NONE;ST_NM_PREF ST_NM_PREF VISIBLE NONE;ST_TYP_BEF ST_TYP_BEF VISIBLE NONE;ST_NM_BASE ST_NM_BASE VISIBLE NONE;ST_NM_SUFF ST_NM_SUFF VISIBLE NONE;ST_TYP_AFT ST_TYP_AFT VISIBLE NONE;ST_TYP_ATT ST_TYP_ATT VISIBLE NONE;ADDR_TYPE ADDR_TYPE VISIBLE NONE;L_REFADDR L_REFADDR VISIBLE NONE;L_NREFADDR L_NREFADDR VISIBLE NONE;L_ADDRSCH L_ADDRSCH VISIBLE NONE;L_ADDRFORM L_ADDRFORM VISIBLE NONE;R_REFADDR R_REFADDR VISIBLE NONE;R_NREFADDR R_NREFADDR VISIBLE NONE;R_ADDRSCH R_ADDRSCH VISIBLE NONE;R_ADDRFORM R_ADDRFORM VISIBLE NONE;REF_IN_ID REF_IN_ID VISIBLE NONE;NREF_IN_ID NREF_IN_ID VISIBLE NONE;N_SHAPEPNT N_SHAPEPNT VISIBLE NONE;FUNC_CLASS FUNC_CLASS VISIBLE NONE;SPEED_CAT SPEED_CAT VISIBLE NONE;FR_SPD_LIM FR_SPD_LIM VISIBLE NONE;TO_SPD_LIM TO_SPD_LIM VISIBLE NONE;TO_LANES TO_LANES VISIBLE NONE;FROM_LANES FROM_LANES VISIBLE NONE;ENH_GEOM ENH_GEOM VISIBLE NONE;LANE_CAT LANE_CAT VISIBLE NONE;DIVIDER DIVIDER VISIBLE NONE;DIR_TRAVEL DIR_TRAVEL VISIBLE NONE;L_AREA_ID L_AREA_ID VISIBLE NONE;R_AREA_ID R_AREA_ID VISIBLE NONE;L_POSTCODE L_POSTCODE VISIBLE NONE;R_POSTCODE R_POSTCODE VISIBLE NONE;L_NUMZONES L_NUMZONES VISIBLE NONE;R_NUMZONES R_NUMZONES VISIBLE NONE;NUM_AD_RNG NUM_AD_RNG VISIBLE NONE;AR_AUTO AR_AUTO VISIBLE NONE;AR_BUS AR_BUS VISIBLE NONE;AR_TAXIS AR_TAXIS VISIBLE NONE;AR_CARPOOL AR_CARPOOL VISIBLE NONE;AR_PEDEST AR_PEDEST VISIBLE NONE;AR_TRUCKS AR_TRUCKS VISIBLE NONE;AR_TRAFF AR_TRAFF VISIBLE NONE;AR_DELIV AR_DELIV VISIBLE NONE;AR_EMERVEH AR_EMERVEH VISIBLE NONE;AR_MOTOR AR_MOTOR VISIBLE NONE;PAVED PAVED VISIBLE NONE;PRIVATE PRIVATE VISIBLE NONE;FRONTAGE FRONTAGE VISIBLE NONE;BRIDGE BRIDGE VISIBLE NONE;TUNNEL TUNNEL VISIBLE NONE;RAMP RAMP VISIBLE NONE;TOLLWAY TOLLWAY VISIBLE NONE;POIACCESS POIACCESS VISIBLE NONE;CONTRACC CONTRACC VISIBLE NONE;ROUNDABOUT ROUNDABOUT VISIBLE NONE;INTERINTER INTERINTER VISIBLE NONE;UNDEFTRAFF UNDEFTRAFF VISIBLE NONE;FERRY_TYPE FERRY_TYPE VISIBLE NONE;MULTIDIGIT MULTIDIGIT VISIBLE NONE;MAXATTR MAXATTR VISIBLE NONE;SPECTRFIG SPECTRFIG VISIBLE NONE;INDESCRIB INDESCRIB VISIBLE NONE;MANOEUVRE MANOEUVRE VISIBLE NONE;DIVIDERLEG DIVIDERLEG VISIBLE NONE;INPROCDATA INPROCDATA VISIBLE NONE;FULL_GEOM FULL_GEOM VISIBLE NONE;URBAN URBAN VISIBLE NONE;ROUTE_TYPE ROUTE_TYPE VISIBLE NONE;DIRONSIGN DIRONSIGN VISIBLE NONE;EXPLICATBL EXPLICATBL VISIBLE NONE;NAMEONRDSN NAMEONRDSN VISIBLE NONE;POSTALNAME POSTALNAME VISIBLE NONE;STALENAME STALENAME VISIBLE NONE;VANITYNAME VANITYNAME VISIBLE NONE;JUNCTIONNM JUNCTIONNM VISIBLE NONE;EXITNAME EXITNAME VISIBLE NONE;SCENIC_RT SCENIC_RT VISIBLE NONE;SCENIC_NM SCENIC_NM VISIBLE NONE;FOURWHLDR FOURWHLDR VISIBLE NONE;COVERIND COVERIND VISIBLE NONE;PLOT_ROAD PLOT_ROAD VISIBLE NONE;REVERSIBLE REVERSIBLE VISIBLE NONE;EXPR_LANE EXPR_LANE VISIBLE NONE;CARPOOLRD CARPOOLRD VISIBLE NONE;PHYS_LANES PHYS_LANES VISIBLE NONE;VER_TRANS VER_TRANS VISIBLE NONE;PUB_ACCESS PUB_ACCESS VISIBLE NONE;LOW_MBLTY LOW_MBLTY VISIBLE NONE;PRIORITYRD PRIORITYRD VISIBLE NONE;SPD_LM_SRC SPD_LM_SRC VISIBLE NONE;EXPAND_INC EXPAND_INC VISIBLE NONE;TRANS_AREA TRANS_AREA VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;InLine_FID InLine_FID VISIBLE NONE;SimLnFlag SimLnFlag VISIBLE NONE;MaxSimpTol MaxSimpTol VISIBLE NONE;MinSimpTol 
MinSimpTol VISIBLE NONE;REF_ZLEV REF_ZLEV VISIBLE NONE;NREF_ZLEV NREF_ZLEV VISIBLE NONE;PlaceCodeL PlaceCodeL VISIBLE NONE;PlaceCodeR PlaceCodeR VISIBLE NONE;PlaceNamL PlaceNamL VISIBLE NONE;PlaceNamR PlaceNamR VISIBLE NONE;StateCodeL StateCodeL VISIBLE NONE;StateCodeR StateCodeR VISIBLE NONE;StateAbbrL StateAbbrL VISIBLE NONE;StateAbbrR StateAbbrR VISIBLE NONE;OneWay OneWay VISIBLE NONE;Speed Speed VISIBLE NONE;CFCC CFCC VISIBLE NONE;M_LINK_ID M_LINK_ID VISIBLE NONE;OLD_LINK_ID OLD_LINK_ID VISIBLE NONE")
# Process: Add Join (11) (Add Join)
AltStreets_Layer_2_ = arcpy.AddJoin_management(in_layer_or_view=AltStreets_Layer_8_, in_field="LINK_ID", join_table=Streets_Layer_3_, join_field="LINK_ID", join_type="KEEP_ALL")[0]
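# KEEP_ALL makes this a left outer join: every AltStreets feature is kept,
# with null Streets fields where no LINK_ID match exists.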
# Process: Table To Table (2) (Table To Table)
Streets_table_2_ = arcpy.TableToTable_conversion(in_rows=Streets_Layer_3_, out_path=Model_Outputs_gdb, out_name="Streets_table", where_clause="", field_mapping=fr"LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,LINK_ID,-1,-1;ST_NAME \"ST_NAME\" true true false 120 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NAME,0,120;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,FEAT_ID,-1,-1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_LANGCD,0,3;NUM_STNMES \"NUM_STNMES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,NUM_STNMES,-1,-1;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NM_PREF,0,6;ST_TYP_BEF \"ST_TYP_BEF\" true true false 50 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_TYP_BEF,0,50;ST_NM_BASE \"ST_NM_BASE\" true true false 70 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NM_BASE,0,70;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 50 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_TYP_AFT,0,50;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_TYP_ATT,0,1;ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ADDR_TYPE,0,1;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_REFADDR,0,10;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_NREFADDR,0,10;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_ADDRSCH,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_ADDRFORM,0,2;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_REFADDR,0,10;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_NREFADDR,0,10;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_ADDRSCH,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_ADDRFORM,0,2;REF_IN_ID \"REF_IN_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,REF_IN_ID,-1,-1;NREF_IN_ID \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,NREF_IN_ID,-1,-1;N_SHAPEPNT \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,N_SHAPEPNT,-1,-1;FUNC_CLASS \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FUNC_CLASS,0,1;SPEED_CAT \"SPEED_CAT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SPEED_CAT,0,1;FR_SPD_LIM \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,FR_SPD_LIM,-1,-1;TO_SPD_LIM \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,TO_SPD_LIM,-1,-1;TO_LANES \"TO_LANES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,TO_LANES,-1,-1;FROM_LANES \"FROM_LANES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,FROM_LANES,-1,-1;ENH_GEOM \"ENH_GEOM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ENH_GEOM,0,1;LANE_CAT \"LANE_CAT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,LANE_CAT,0,1;DIVIDER \"DIVIDER\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,DIVIDER,0,1;DIR_TRAVEL \"DIR_TRAVEL\" true true false 1 Text 0 
0,First,#,{Model_Outputs_gdb}\Streets,DIR_TRAVEL,0,1;L_AREA_ID \"L_AREA_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,L_AREA_ID,-1,-1;R_AREA_ID \"R_AREA_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,R_AREA_ID,-1,-1;L_POSTCODE \"L_POSTCODE\" true true false 11 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_POSTCODE,0,11;R_POSTCODE \"R_POSTCODE\" true true false 11 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_POSTCODE,0,11;L_NUMZONES \"L_NUMZONES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,L_NUMZONES,-1,-1;R_NUMZONES \"R_NUMZONES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,R_NUMZONES,-1,-1;NUM_AD_RNG \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,NUM_AD_RNG,-1,-1;AR_AUTO \"AR_AUTO\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_AUTO,0,1;AR_BUS \"AR_BUS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_BUS,0,1;AR_TAXIS \"AR_TAXIS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_TAXIS,0,1;AR_CARPOOL \"AR_CARPOOL\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_CARPOOL,0,1;AR_PEDEST \"AR_PEDEST\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_PEDEST,0,1;AR_TRUCKS \"AR_TRUCKS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_TRUCKS,0,1;AR_TRAFF \"AR_TRAFF\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_TRAFF,0,1;AR_DELIV \"AR_DELIV\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_DELIV,0,1;AR_EMERVEH \"AR_EMERVEH\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_EMERVEH,0,1;AR_MOTOR \"AR_MOTOR\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_MOTOR,0,1;PAVED \"PAVED\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PAVED,0,1;PRIVATE \"PRIVATE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PRIVATE,0,1;FRONTAGE \"FRONTAGE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FRONTAGE,0,1;BRIDGE \"BRIDGE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,BRIDGE,0,1;TUNNEL \"TUNNEL\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,TUNNEL,0,1;RAMP \"RAMP\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,RAMP,0,1;TOLLWAY \"TOLLWAY\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,TOLLWAY,0,1;POIACCESS \"POIACCESS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,POIACCESS,0,1;CONTRACC \"CONTRACC\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,CONTRACC,0,1;ROUNDABOUT \"ROUNDABOUT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ROUNDABOUT,0,1;INTERINTER \"INTERINTER\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,INTERINTER,0,1;UNDEFTRAFF \"UNDEFTRAFF\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,UNDEFTRAFF,0,1;FERRY_TYPE \"FERRY_TYPE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FERRY_TYPE,0,1;MULTIDIGIT \"MULTIDIGIT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,MULTIDIGIT,0,1;MAXATTR \"MAXATTR\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,MAXATTR,0,1;SPECTRFIG \"SPECTRFIG\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SPECTRFIG,0,1;INDESCRIB \"INDESCRIB\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,INDESCRIB,0,1;MANOEUVRE \"MANOEUVRE\" true true false 1 Text 0 
0,First,#,{Model_Outputs_gdb}\Streets,MANOEUVRE,0,1;DIVIDERLEG \"DIVIDERLEG\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,DIVIDERLEG,0,1;INPROCDATA \"INPROCDATA\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,INPROCDATA,0,1;FULL_GEOM \"FULL_GEOM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FULL_GEOM,0,1;URBAN \"URBAN\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,URBAN,0,1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ROUTE_TYPE,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,DIRONSIGN,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXPLICATBL,0,1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,NAMEONRDSN,0,1;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,POSTALNAME,0,1;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,STALENAME,0,1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,VANITYNAME,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,JUNCTIONNM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXITNAME,0,1;SCENIC_RT \"SCENIC_RT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SCENIC_RT,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SCENIC_NM,0,1;FOURWHLDR \"FOURWHLDR\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FOURWHLDR,0,1;COVERIND \"COVERIND\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,COVERIND,0,2;PLOT_ROAD \"PLOT_ROAD\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PLOT_ROAD,0,1;REVERSIBLE \"REVERSIBLE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,REVERSIBLE,0,1;EXPR_LANE \"EXPR_LANE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXPR_LANE,0,1;CARPOOLRD \"CARPOOLRD\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,CARPOOLRD,0,1;PHYS_LANES \"PHYS_LANES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,PHYS_LANES,-1,-1;VER_TRANS \"VER_TRANS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,VER_TRANS,0,1;PUB_ACCESS \"PUB_ACCESS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PUB_ACCESS,0,1;LOW_MBLTY \"LOW_MBLTY\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,LOW_MBLTY,0,1;PRIORITYRD \"PRIORITYRD\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PRIORITYRD,0,1;SPD_LM_SRC \"SPD_LM_SRC\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SPD_LM_SRC,0,2;EXPAND_INC \"EXPAND_INC\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXPAND_INC,0,1;TRANS_AREA \"TRANS_AREA\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,TRANS_AREA,0,1;Shape_Length \"Shape_Length\" false true true 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,Shape_Length,-1,-1;InLine_FID \"InLine_FID\" true true false 4 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,InLine_FID,-1,-1;SimLnFlag \"SimLnFlag\" true true false 2 Short 0 0,First,#,{Model_Outputs_gdb}\Streets,SimLnFlag,-1,-1;MaxSimpTol \"MaxSimpTol\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,MaxSimpTol,-1,-1;MinSimpTol \"MinSimpTol\" true true false 8 Double 0 
0,First,#,{Model_Outputs_gdb}\Streets,MinSimpTol,-1,-1;REF_ZLEV \"REF_ZLEV\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,REF_ZLEV,-1,-1;NREF_ZLEV \"NREF_ZLEV\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,NREF_ZLEV,-1,-1;PlaceCodeL \"PlaceCodeL\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceCodeL,-1,-1;PlaceCodeR \"PlaceCodeR\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceCodeR,-1,-1;PlaceNamL \"PlaceNamL\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceNamL,0,255;PlaceNamR \"PlaceNamR\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceNamR,0,255;StateCodeL \"StateCodeL\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,StateCodeL,-1,-1;StateCodeR \"StateCodeR\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,StateCodeR,-1,-1;StateAbbrL \"StateAbbrL\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,StateAbbrL,0,255;StateAbbrR \"StateAbbrR\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,StateAbbrR,0,255;OneWay \"OneWay\" true true false 0 Short 0 0,First,#,{Model_Outputs_gdb}\Streets,OneWay,-1,-1;Speed \"Speed\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,Speed,-1,-1;CFCC \"CFCC\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,CFCC,0,255;M_LINK_ID \"M_LINK_ID\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,M_LINK_ID,-1,-1;OLD_LINK_ID \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,OLD_LINK_ID,-1,-1", config_keyword="")[0]
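# Exports the Streets layer to a flat Streets_table; the Join Field step
# below pulls NUM_STNMES and ST_NAME from this snapshot.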
# Process: Join Field (Join Field)
AltStreets_Layer_4_ = arcpy.JoinField_management(in_data=AltStreets_Layer_8_, in_field="LINK_ID", join_table=Streets_table_2_, join_field="LINK_ID", fields=["NUM_STNMES", "ST_NAME"])[0]
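# Unlike Add Join, Join Field permanently appends NUM_STNMES and ST_NAME
# from Streets_table onto the AltStreets layer's source table.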
# Process: Feature Class to Feature Class (6) (Feature Class to Feature Class)
AltStreets_Layer_join = arcpy.FeatureClassToFeatureClass_conversion(in_features=AltStreets_Layer_4_, out_path=Model_Outputs_gdb, out_name="AltStreets_Layer_join", where_clause="Streets.NUM_STNMES > 1", field_mapping="LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,LINK_ID,-1,-1;ST_NAME \"ST_NAME\" true true false 240 Text 0 0,First,#,AltStreets_Layer,ST_NAME,0,240,AltStreets_Layer,ST_NAME,0,120;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,FEAT_ID,-1,-1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,AltStreets_Layer,ST_LANGCD,0,3;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,AltStreets_Layer,ST_NM_PREF,0,6;ST_TYP_BEF \"ST_TYP_BEF\" true true false 90 Text 0 0,First,#,AltStreets_Layer,ST_TYP_BEF,0,90;ST_NM_BASE \"ST_NM_BASE\" true true false 105 Text 0 0,First,#,AltStreets_Layer,ST_NM_BASE,0,105;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,AltStreets_Layer,ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 90 Text 0 0,First,#,AltStreets_Layer,ST_TYP_AFT,0,90;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,AltStreets_Layer,ST_TYP_ATT,0,1;ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer,ADDR_TYPE,0,1;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,L_REFADDR,0,10;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,L_NREFADDR,0,10;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer,L_ADDRSCH,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer,L_ADDRFORM,0,2;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,R_REFADDR,0,10;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,R_NREFADDR,0,10;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer,R_ADDRSCH,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer,R_ADDRFORM,0,2;NUM_AD_RNG \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,AltStreets_Layer,NUM_AD_RNG,-1,-1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer,ROUTE_TYPE,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,AltStreets_Layer,DIRONSIGN,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,AltStreets_Layer,EXPLICATBL,0,1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,AltStreets_Layer,NAMEONRDSN,0,1;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,POSTALNAME,0,1;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,STALENAME,0,1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,VANITYNAME,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,AltStreets_Layer,JUNCTIONNM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,EXITNAME,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,AltStreets_Layer,SCENIC_NM,0,1;Shape_Length \"Shape_Length\" false true true 8 Double 0 0,First,#,AltStreets_Layer,Shape_Length,-1,-1;InLine_FID \"InLine_FID\" true true false 4 Long 0 0,First,#,AltStreets_Layer,InLine_FID,-1,-1;SimLnFlag \"SimLnFlag\" true true false 2 Short 0 0,First,#,AltStreets_Layer,SimLnFlag,-1,-1;MaxSimpTol \"MaxSimpTol\" true true false 8 Double 0 0,First,#,AltStreets_Layer,MaxSimpTol,-1,-1;MinSimpTol \"MinSimpTol\" true true false 8 Double 0 0,First,#,AltStreets_Layer,MinSimpTol,-1,-1;REF_ZLEV \"REF_ZLEV\" true true false 0 Short 0 
0,First,#,AltStreets_Layer,REF_ZLEV,-1,-1;DOM \"DOM\" true true false 0 Long 0 0,First,#,AltStreets_Layer,DOM,-1,-1;NUM_STNMES \"NUM_STNMES\" true true false 8 Double 0 0,First,#,AltStreets_Layer,NUM_STNMES,-1,-1", config_keyword="")[0]
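# The NUM_STNMES > 1 filter keeps only links carrying more than one street
# name, i.e., links that genuinely have alternate names.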
# Process: Summary Statistics (2) (Summary Statistics)
AltStreets_Layer_join_Stats = fr"{Model_Outputs_gdb}\AltStreets_Layer_join_Stats"
arcpy.Statistics_analysis(in_table=AltStreets_Layer_join, out_table=AltStreets_Layer_join_Stats, statistics_fields=[["LINK_ID", "FIRST"]], case_field=["LINK_ID", "ST_NAME"])
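# Grouping by LINK_ID and ST_NAME yields one row per distinct link/name pair;
# FIRST_LINK_ID serves below as a non-null flag for links in that subset.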
# Process: Add Join (12) (Add Join)
AltStreets_Layer_6_ = arcpy.AddJoin_management(in_layer_or_view=AltStreets_Layer_8_, in_field="LINK_ID", join_table=AltStreets_Layer_join_Stats, join_field="LINK_ID", join_type="KEEP_ALL")[0]
# Process: Calculate Field (6) (Calculate Field)
AltStreets_Layer = arcpy.CalculateField_management(in_table=AltStreets_Layer_6_, field="Dom", expression="1", expression_type="PYTHON3", code_block="", field_type="TEXT")[0]
# Process: Calculate Field (8) (Calculate Field)
AltStreets_Layer_7_ = arcpy.CalculateField_management(in_table=AltStreets_Layer, field="REF_ZLEV", expression="-9", expression_type="PYTHON3", code_block="", field_type="TEXT")[0]
# Process: Feature Class to Feature Class (Feature Class to Feature Class)
AltStreets_Layer_3_ = arcpy.FeatureClassToFeatureClass_conversion(in_features=AltStreets_Layer_7_, out_path=Model_Outputs_gdb, out_name="AltStreets_Layer", where_clause="AltStreets_Layer_join_Stats.FIRST_LINK_ID IS NOT NULL", field_mapping="LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,AltStreets.LINK_ID,-1,-1;ST_NAME \"ST_NAME\" true true false 240 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_NAME,0,240;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,AltStreets.FEAT_ID,-1,-1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_LANGCD,0,3;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_NM_PREF,0,6;ST_TYP_BEF \"ST_TYP_BEF\" true true false 90 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_TYP_BEF,0,90;ST_NM_BASE \"ST_NM_BASE\" true true false 105 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_NM_BASE,0,105;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 90 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_TYP_AFT,0,90;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.ST_TYP_ATT,0,1;ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.ADDR_TYPE,0,1;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,AltStreets.L_REFADDR,0,10;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,AltStreets.L_NREFADDR,0,10;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.L_ADDRSCH,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer,AltStreets.L_ADDRFORM,0,2;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,AltStreets.R_REFADDR,0,10;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,AltStreets_Layer,AltStreets.R_NREFADDR,0,10;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.R_ADDRSCH,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,AltStreets_Layer,AltStreets.R_ADDRFORM,0,2;NUM_AD_RNG \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,AltStreets_Layer,AltStreets.NUM_AD_RNG,-1,-1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.ROUTE_TYPE,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.DIRONSIGN,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.EXPLICATBL,0,1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.NAMEONRDSN,0,1;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.POSTALNAME,0,1;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.STALENAME,0,1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.VANITYNAME,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.JUNCTIONNM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.EXITNAME,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,AltStreets_Layer,AltStreets.SCENIC_NM,0,1;REF_ZLEV \"REF_ZLEV\" true true false 2 Short 0 0,First,#,AltStreets_Layer,AltStreets.REF_ZLEV,-1,-1;Dom \"Dom\" true true false 2 Short 0 0,First,#,AltStreets_Layer,AltStreets.Dom,-1,-1;OBJECTID \"OBJECTID\" false true 
false 4 Long 0 9,First,#,AltStreets_Layer,Streets.OBJECTID,-1,-1;REF_IN_ID \"REF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.REF_IN_ID,-1,-1;NREF_IN_ID \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.NREF_IN_ID,-1,-1;N_SHAPEPNT \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.N_SHAPEPNT,-1,-1;FUNC_CLASS \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.FUNC_CLASS,0,1;SPEED_CAT \"SPEED_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.SPEED_CAT,0,1;FR_SPD_LIM \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.FR_SPD_LIM,-1,-1;TO_SPD_LIM \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.TO_SPD_LIM,-1,-1;TO_LANES \"TO_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.TO_LANES,-1,-1;FROM_LANES \"FROM_LANES\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.FROM_LANES,-1,-1;ENH_GEOM \"ENH_GEOM\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.ENH_GEOM,0,1;LANE_CAT \"LANE_CAT\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.LANE_CAT,0,1;DIVIDER \"DIVIDER\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.DIVIDER,0,1;DIR_TRAVEL \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.DIR_TRAVEL,0,1;L_AREA_ID \"L_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.L_AREA_ID,-1,-1;R_AREA_ID \"R_AREA_ID\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.R_AREA_ID,-1,-1;L_POSTCODE \"L_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer,Streets.L_POSTCODE,0,11;R_POSTCODE \"R_POSTCODE\" true true false 11 Text 0 0,First,#,AltStreets_Layer,Streets.R_POSTCODE,0,11;L_NUMZONES \"L_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.L_NUMZONES,-1,-1;R_NUMZONES \"R_NUMZONES\" true true false 8 Double 0 0,First,#,AltStreets_Layer,Streets.R_NUMZONES,-1,-1;RAMP \"RAMP\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.RAMP,0,1;NREF_ZLEV \"NREF_ZLEV\" true true false 2 Short 0 0,First,#,AltStreets_Layer,Streets.NREF_ZLEV,-1,-1;PlaceCodeL \"PlaceCodeL\" true true false 4 Long 0 0,First,#,AltStreets_Layer,Streets.PlaceCodeL,-1,-1;PlaceCodeR \"PlaceCodeR\" true true false 4 Long 0 0,First,#,AltStreets_Layer,Streets.PlaceCodeR,-1,-1;PlaceNamL \"PlaceNamL\" true true false 255 Text 0 0,First,#,AltStreets_Layer,Streets.PlaceNamL,0,255;PlaceNamR \"PlaceNamR\" true true false 255 Text 0 0,First,#,AltStreets_Layer,Streets.PlaceNamR,0,255;StateCodeL \"StateCodeL\" true true false 2 Short 0 0,First,#,AltStreets_Layer,Streets.StateCodeL,-1,-1;StateCodeR \"StateCodeR\" true true false 2 Short 0 0,First,#,AltStreets_Layer,Streets.StateCodeR,-1,-1;StateAbbrL \"StateAbbrL\" true true false 2 Text 0 0,First,#,AltStreets_Layer,Streets.StateAbbrL,0,2;StateAbbrR \"StateAbbrR\" true true false 2 Text 0 0,First,#,AltStreets_Layer,Streets.StateAbbrR,0,2;OneWay \"OneWay\" true true false 1 Text 0 0,First,#,AltStreets_Layer,Streets.OneWay,0,1;Speed \"Speed\" true true false 2 Short 0 0,First,#,AltStreets_Layer,Streets.Speed,-1,-1;CFCC \"CFCC\" true true false 4 Text 0 0,First,#,AltStreets_Layer,Streets.CFCC,0,4", config_keyword="")[0]
# Process: Dissolve (Dissolve)
AltStreets_Layer_Dissolve1 = "C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\MapMaker-Conversion-2019.gdb\\AltStreets_Layer_Dissolve1"
arcpy.Dissolve_management(in_features=AltStreets_Layer_3_, out_feature_class=AltStreets_Layer_Dissolve1, dissolve_field=["ADDR_TYPE", "CFCC", "DIR_TRAVEL", "DIRONSIGN", "DIVIDER", "Dom", "ENH_GEOM", "EXITNAME", "EXPLICATBL", "FEAT_ID", "FR_SPD_LIM", "FROM_LANES", "FUNC_CLASS", "JUNCTIONNM", "L_ADDRFORM", "L_ADDRSCH", "L_AREA_ID", "L_NREFADDR", "L_NUMZONES", "L_POSTCODE", "L_REFADDR", "LANE_CAT", "LINK_ID", "N_SHAPEPNT", "NAMEONRDSN", "NREF_IN_ID", "NREF_ZLEV", "NUM_AD_RNG", "OneWay", "PlaceCodeL", "PlaceCodeR", "PlaceNamL", "PlaceNamR", "POSTALNAME", "R_ADDRFORM", "R_ADDRSCH", "R_AREA_ID", "R_NREFADDR", "R_NUMZONES", "R_POSTCODE", "R_REFADDR", "RAMP", "REF_IN_ID", "REF_ZLEV", "ROUTE_TYPE", "SCENIC_NM", "Speed", "SPEED_CAT", "ST_LANGCD", "ST_NAME", "ST_NM_BASE", "ST_NM_PREF", "ST_NM_SUFF", "ST_TYP_AFT", "ST_TYP_ATT", "ST_TYP_BEF", "STALENAME", "StateAbbrL", "StateAbbrR", "StateCodeL", "StateCodeR", "TO_LANES", "TO_SPD_LIM", "VANITYNAME"], statistics_fields=[], multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")
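# Dissolving on the full attribute list merges only segments identical in
# every listed field; with MULTI_PART/DISSOLVE_LINES, matching segments
# collapse into (possibly multipart) line features.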
# Process: Select_Data_8_ (Select Data)
# Select Data Utility is not implemented
# Process: Add Join (13) (Add Join)
Streets_Layer_12_ = arcpy.AddJoin_management(in_layer_or_view=Streets_Layer_3_, in_field="REF_IN_ID", join_table=Zlevels, join_field="NODE_ID", join_type="KEEP_ALL")[0]
# Process: Calculate Field (9) (Calculate Field)
Streets_Layer_11_ = arcpy.CalculateField_management(in_table=Streets_Layer_12_, field="REF_ZLEV", expression="zlevCalc(!Zlevels.Z_LEVEL!)", expression_type="PYTHON3", code_block="""def zlevCalc(z):
    if z != 0:
        return z
    else:
        return 0
""", field_type="TEXT")[0]
# Process: Remove Join (8) (Remove Join)
Streets_Layer_13_ = arcpy.RemoveJoin_management(in_layer_or_view=Streets_Layer_11_, join_name="Zlevels")[0]
# Process: Add Join (8) (Add Join)
Streets_Layer_4_ = arcpy.AddJoin_management(in_layer_or_view=Streets_Layer_13_, in_field="NREF_IN_ID", join_table=Zlevels, join_field="NODE_ID", join_type="KEEP_ALL")[0]
# Process: Calculate Field (10) (Calculate Field)
Streets_Layer_14_ = arcpy.CalculateField_management(in_table=Streets_Layer_4_, field="NREF_ZLEV", expression="zlevCalc(!Zlevels.Z_LEVEL!)", expression_type="PYTHON3", code_block="""def zlevCalc(z):
    if z != 0:
        return z
    else:
        return 0
""", field_type="TEXT")[0]
# Process: Remove Join (9) (Remove Join)
Streets_Layer_15_ = arcpy.RemoveJoin_management(in_layer_or_view=Streets_Layer_14_, join_name="Zlevels")[0]
# Process: Select_Data_9_ (Select Data)
# Select Data Utility is not implemented
# Process: Add Join (10) (Add Join)
Streets_Layer_8_ = arcpy.AddJoin_management(in_layer_or_view=Streets_Layer_15_, in_field="R_AREA_ID", join_table=Adminbndy4, join_field="AREA_ID", join_type="KEEP_ALL")[0]
# Process: Calculate Fields (4) (Calculate Fields (multiple))
Streets_Layer_2_ = arcpy.CalculateFields_management(in_table=Streets_Layer_8_, expression_type="PYTHON3", fields=[["Streets.PlaceCodeR", "!Adminbndy4.AREA_ID!"], ["Streets.PlaceNamR", "!Adminbndy4.POLYGON_NM!"]], code_block="")[0]
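# Copies the level-4 admin area id and name onto the right side of each
# street; the mirrored block below does the left side.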
# Process: Remove Join (7) (Remove Join)
Streets_Layer_10_ = arcpy.RemoveJoin_management(in_layer_or_view=Streets_Layer_2_, join_name="Adminbndy4")[0]
# Process: Add Join (9) (Add Join)
Streets_Layer_5_ = arcpy.AddJoin_management(in_layer_or_view=Streets_Layer_10_, in_field="L_AREA_ID", join_table=Adminbndy4, join_field="AREA_ID", join_type="KEEP_ALL")[0]
# Process: Calculate Fields (3) (Calculate Fields (multiple))
Streets_Layer_26_ = arcpy.CalculateFields_management(in_table=Streets_Layer_5_, expression_type="PYTHON3", fields=[["Streets.PlaceCodeL", "!Adminbndy4.AREA_ID!"], ["Streets.PlaceNamL", "!Adminbndy4.POLYGON_NM!"]], code_block="")[0]
# Process: Remove Join (6) (Remove Join)
Streets_Layer_7_ = arcpy.RemoveJoin_management(in_layer_or_view=Streets_Layer_26_, join_name="Adminbndy4")[0]
# Process: Calculate Field (11) (Calculate Field)
Streets_Layer_16_ = arcpy.CalculateField_management(in_table=Streets_Layer_7_, field="OneWay", expression="oneWCalc(!DIR_TRAVEL!)", expression_type="PYTHON3", code_block="""def oneWCalc(dir):
    if dir == "T":
        return ">"
    elif dir == "F":
        return "<"
    else:
        return ""
""", field_type="TEXT")[0]
# Process: Calculate Field (12) (Calculate Field)
Streets_Layer_17_ = arcpy.CalculateField_management(in_table=Streets_Layer_16_, field="Speed", expression="speedCalc(!DIR_TRAVEL!,!TO_SPD_LIM!,!FR_SPD_LIM!)", expression_type="PYTHON3", code_block="""def speedCalc(dir, toSpeed, fromSpeed):
    if dir == "T":
        return toSpeed
    else:
        return fromSpeed
""", field_type="TEXT")[0]
# Process: Calculate Field (13) (Calculate Field)
Streets_Layer_18_ = arcpy.CalculateField_management(in_table=Streets_Layer_17_, field="Speed", expression="nullSpeedCalc(!Speed!, !SPEED_CAT!)", expression_type="PYTHON3", code_block="""def nullSpeedCalc(speed, cat):
    # Backfill null Speed values from SPEED_CAT (categories 5-8); keep any
    # existing speed unchanged (assumed intent of this step).
    if speed is not None:
        return speed
    if cat == '8':
        return 15
    elif cat == '7':
        return 20
    elif cat == '6':
        return 25
    elif cat == '5':
        return 35
    return speed
""", field_type="TEXT")[0]
# Process: Calculate Field (14) (Calculate Field)
Streets_Layer_19_ = arcpy.CalculateField_management(in_table=Streets_Layer_18_, field="CFCC", expression="cfccCalc(!FUNC_CLASS!)", expression_type="PYTHON3", code_block="""def cfccCalc(fClass):
    # FUNC_CLASS is a 1-character Text field per the field mapping, so the
    # values arrive as strings.
    if fClass == "1":
        return "A10"
    elif fClass == "2":
        return "A20"
    elif fClass == "3":
        return "A30"
    elif fClass in ("4", "5"):
        return "A40"
""", field_type="TEXT")[0]
# Process: Select_Data_10_ (Select Data)
# Select Data Utility is not implemented
# Process: Add Join (14) (Add Join)
Streets_Layer = arcpy.AddJoin_management(in_layer_or_view=Streets_Layer_19_, in_field="L_AREA_ID", join_table=Adminbndy3, join_field="AREA_ID", join_type="KEEP_ALL")[0]
# Process: Calculate Field (15) (Calculate Field)
Streets_Layer_20_ = arcpy.CalculateField_management(in_table=Streets_Layer, field="Streets.PlaceCodeL", expression="placeCodeCalc(!Adminbndy3.POLYGON_NM!)", expression_type="PYTHON3", code_block="""def placeCodeCalc(name):
    if name in ("ST LOUIS (CITY)", "ST CLAIR"):
        return 29
    else:
        return 17
""", field_type="TEXT")[0]
# Process: Calculate Field (16) (Calculate Field)
Streets_Layer_21_ = arcpy.CalculateField_management(in_table=Streets_Layer_20_, field="Streets.PlaceCodeR", expression="placeCodeCalc(!Adminbndy3.POLYGON_NM!)", expression_type="PYTHON3", code_block="""def placeCodeCalc(name):
    if name in ("ST LOUIS (CITY)", "ST CLAIR"):
        return 29
    else:
        return 17
""", field_type="TEXT")[0]
# Process: Calculate Field (17) (Calculate Field)
Streets_Layer_22_ = arcpy.CalculateField_management(in_table=Streets_Layer_21_, field="Streets.StateAbbrL", expression="placeAbbrCalc(!Adminbndy3.POLYGON_NM!)", expression_type="PYTHON3", code_block="""def placeAbbrCalc(name):
    if name in ("ST LOUIS (CITY)", "ST CLAIR"):
        return "MO"
    else:
        return "IL"
""", field_type="TEXT")[0]
# Process: Calculate Field (24) (Calculate Field)
Streets_Layer_23_ = arcpy.CalculateField_management(in_table=Streets_Layer_22_, field="Streets.StateAbbrR", expression="placeAbbrCalc(!Adminbndy3.POLYGON_NM!)", expression_type="PYTHON3", code_block="""def placeAbbrCalc(name):
    if name in ("ST LOUIS (CITY)", "ST CLAIR"):
        return "MO"
    else:
        return "IL"
""", field_type="TEXT")[0]
# Process: Remove Join (10) (Remove Join)
Streets_Layer_24_ = arcpy.RemoveJoin_management(in_layer_or_view=Streets_Layer_23_, join_name="Adminbndy3")[0]
# Process: Calculate Fields (multiple) (Calculate Fields (multiple))
Streets_Layer_25_ = arcpy.CalculateFields_management(in_table=Streets_Layer_24_, expression_type="PYTHON3", fields=[["M_LINK_ID", "!OBJECTID!"], ["OLD_LINK_ID", "!LINK_ID!"]], code_block="")[0]
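# Preserve the current OBJECTID and original LINK_ID in M_LINK_ID and
# OLD_LINK_ID before the table export below.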
# Process: Table To Table (Table To Table)
Streets_table = arcpy.TableToTable_conversion(in_rows=Streets_Layer_25_, out_path=Model_Outputs_gdb, out_name="Streets_table", where_clause="", field_mapping=fr"LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,LINK_ID,-1,-1;ST_NAME \"ST_NAME\" true true false 120 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NAME,0,120;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,FEAT_ID,-1,-1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_LANGCD,0,3;NUM_STNMES \"NUM_STNMES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,NUM_STNMES,-1,-1;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NM_PREF,0,6;ST_TYP_BEF \"ST_TYP_BEF\" true true false 50 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_TYP_BEF,0,50;ST_NM_BASE \"ST_NM_BASE\" true true false 70 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NM_BASE,0,70;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 50 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_TYP_AFT,0,50;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ST_TYP_ATT,0,1;ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ADDR_TYPE,0,1;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_REFADDR,0,10;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_NREFADDR,0,10;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_ADDRSCH,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_ADDRFORM,0,2;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_REFADDR,0,10;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_NREFADDR,0,10;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_ADDRSCH,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_ADDRFORM,0,2;REF_IN_ID \"REF_IN_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,REF_IN_ID,-1,-1;NREF_IN_ID \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,NREF_IN_ID,-1,-1;N_SHAPEPNT \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,N_SHAPEPNT,-1,-1;FUNC_CLASS \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FUNC_CLASS,0,1;SPEED_CAT \"SPEED_CAT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SPEED_CAT,0,1;FR_SPD_LIM \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,FR_SPD_LIM,-1,-1;TO_SPD_LIM \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,TO_SPD_LIM,-1,-1;TO_LANES \"TO_LANES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,TO_LANES,-1,-1;FROM_LANES \"FROM_LANES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,FROM_LANES,-1,-1;ENH_GEOM \"ENH_GEOM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ENH_GEOM,0,1;LANE_CAT \"LANE_CAT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,LANE_CAT,0,1;DIVIDER \"DIVIDER\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,DIVIDER,0,1;DIR_TRAVEL \"DIR_TRAVEL\" true true false 1 Text 0 
0,First,#,{Model_Outputs_gdb}\Streets,DIR_TRAVEL,0,1;L_AREA_ID \"L_AREA_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,L_AREA_ID,-1,-1;R_AREA_ID \"R_AREA_ID\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,R_AREA_ID,-1,-1;L_POSTCODE \"L_POSTCODE\" true true false 11 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,L_POSTCODE,0,11;R_POSTCODE \"R_POSTCODE\" true true false 11 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,R_POSTCODE,0,11;L_NUMZONES \"L_NUMZONES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,L_NUMZONES,-1,-1;R_NUMZONES \"R_NUMZONES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,R_NUMZONES,-1,-1;NUM_AD_RNG \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,NUM_AD_RNG,-1,-1;AR_AUTO \"AR_AUTO\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_AUTO,0,1;AR_BUS \"AR_BUS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_BUS,0,1;AR_TAXIS \"AR_TAXIS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_TAXIS,0,1;AR_CARPOOL \"AR_CARPOOL\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_CARPOOL,0,1;AR_PEDEST \"AR_PEDEST\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_PEDEST,0,1;AR_TRUCKS \"AR_TRUCKS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_TRUCKS,0,1;AR_TRAFF \"AR_TRAFF\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_TRAFF,0,1;AR_DELIV \"AR_DELIV\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_DELIV,0,1;AR_EMERVEH \"AR_EMERVEH\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_EMERVEH,0,1;AR_MOTOR \"AR_MOTOR\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,AR_MOTOR,0,1;PAVED \"PAVED\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PAVED,0,1;PRIVATE \"PRIVATE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PRIVATE,0,1;FRONTAGE \"FRONTAGE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FRONTAGE,0,1;BRIDGE \"BRIDGE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,BRIDGE,0,1;TUNNEL \"TUNNEL\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,TUNNEL,0,1;RAMP \"RAMP\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,RAMP,0,1;TOLLWAY \"TOLLWAY\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,TOLLWAY,0,1;POIACCESS \"POIACCESS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,POIACCESS,0,1;CONTRACC \"CONTRACC\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,CONTRACC,0,1;ROUNDABOUT \"ROUNDABOUT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ROUNDABOUT,0,1;INTERINTER \"INTERINTER\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,INTERINTER,0,1;UNDEFTRAFF \"UNDEFTRAFF\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,UNDEFTRAFF,0,1;FERRY_TYPE \"FERRY_TYPE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FERRY_TYPE,0,1;MULTIDIGIT \"MULTIDIGIT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,MULTIDIGIT,0,1;MAXATTR \"MAXATTR\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,MAXATTR,0,1;SPECTRFIG \"SPECTRFIG\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SPECTRFIG,0,1;INDESCRIB \"INDESCRIB\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,INDESCRIB,0,1;MANOEUVRE \"MANOEUVRE\" true true false 1 Text 0 
0,First,#,{Model_Outputs_gdb}\Streets,MANOEUVRE,0,1;DIVIDERLEG \"DIVIDERLEG\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,DIVIDERLEG,0,1;INPROCDATA \"INPROCDATA\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,INPROCDATA,0,1;FULL_GEOM \"FULL_GEOM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FULL_GEOM,0,1;URBAN \"URBAN\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,URBAN,0,1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,ROUTE_TYPE,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,DIRONSIGN,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXPLICATBL,0,1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,NAMEONRDSN,0,1;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,POSTALNAME,0,1;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,STALENAME,0,1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,VANITYNAME,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,JUNCTIONNM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXITNAME,0,1;SCENIC_RT \"SCENIC_RT\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SCENIC_RT,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SCENIC_NM,0,1;FOURWHLDR \"FOURWHLDR\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,FOURWHLDR,0,1;COVERIND \"COVERIND\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,COVERIND,0,2;PLOT_ROAD \"PLOT_ROAD\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PLOT_ROAD,0,1;REVERSIBLE \"REVERSIBLE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,REVERSIBLE,0,1;EXPR_LANE \"EXPR_LANE\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXPR_LANE,0,1;CARPOOLRD \"CARPOOLRD\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,CARPOOLRD,0,1;PHYS_LANES \"PHYS_LANES\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,PHYS_LANES,-1,-1;VER_TRANS \"VER_TRANS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,VER_TRANS,0,1;PUB_ACCESS \"PUB_ACCESS\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PUB_ACCESS,0,1;LOW_MBLTY \"LOW_MBLTY\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,LOW_MBLTY,0,1;PRIORITYRD \"PRIORITYRD\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PRIORITYRD,0,1;SPD_LM_SRC \"SPD_LM_SRC\" true true false 2 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,SPD_LM_SRC,0,2;EXPAND_INC \"EXPAND_INC\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,EXPAND_INC,0,1;TRANS_AREA \"TRANS_AREA\" true true false 1 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,TRANS_AREA,0,1;InLine_FID \"InLine_FID\" true true false 4 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,InLine_FID,-1,-1;SimLnFlag \"SimLnFlag\" true true false 2 Short 0 0,First,#,{Model_Outputs_gdb}\Streets,SimLnFlag,-1,-1;MaxSimpTol \"MaxSimpTol\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,MaxSimpTol,-1,-1;MinSimpTol \"MinSimpTol\" true true false 8 Double 0 0,First,#,{Model_Outputs_gdb}\Streets,MinSimpTol,-1,-1;REF_ZLEV \"REF_ZLEV\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,REF_ZLEV,-1,-1;NREF_ZLEV 
\"NREF_ZLEV\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,NREF_ZLEV,-1,-1;PlaceCodeL \"PlaceCodeL\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceCodeL,-1,-1;PlaceCodeR \"PlaceCodeR\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceCodeR,-1,-1;PlaceNamL \"PlaceNamL\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceNamL,0,255;PlaceNamR \"PlaceNamR\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,PlaceNamR,0,255;StateCodeL \"StateCodeL\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,StateCodeL,-1,-1;StateCodeR \"StateCodeR\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,StateCodeR,-1,-1;StateAbbrL \"StateAbbrL\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,StateAbbrL,0,255;StateAbbrR \"StateAbbrR\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,StateAbbrR,0,255;OneWay \"OneWay\" true true false 0 Short 0 0,First,#,{Model_Outputs_gdb}\Streets,OneWay,-1,-1;Speed \"Speed\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,Speed,-1,-1;CFCC \"CFCC\" true true false 255 Text 0 0,First,#,{Model_Outputs_gdb}\Streets,CFCC,0,255;M_LINK_ID \"M_LINK_ID\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,M_LINK_ID,-1,-1;OLD_LINK_ID \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,{Model_Outputs_gdb}\Streets,OLD_LINK_ID,-1,-1", config_keyword="")[0]
# Process: Add Join (Add Join)
AltStreets_Layer_Dissolve1_L = arcpy.AddJoin_management(in_layer_or_view=AltStreets_Layer_Dissolve1, in_field="LINK_ID", join_table=Streets_table, join_field="LINK_ID", join_type="KEEP_ALL")[0]
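# Add Join leaves the join active on the layer, so joined fields are
# referenced by qualified names (table.field) -- which is why the where
# clauses below compare AltStreets_Layer_Dissolve.ST_NAME against
# Streets_table.ST_NAME.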
# Process: Feature Class to Feature Class (2) (Feature Class to Feature Class)
Removed_AltStreets = arcpy.FeatureClassToFeatureClass_conversion(in_features=AltStreets_Layer_Dissolve1_L, out_path=Model_Outputs_gdb, out_name="Removed_AltStreets", where_clause="AltStreets_Layer_Dissolve.ST_NAME = Streets_table.ST_NAME",field_mapping=Field_Map, config_keyword="")[0]
# Process: Feature Class to Feature Class (3) (Feature Class to Feature Class)
AltStreets_Final = arcpy.FeatureClassToFeatureClass_conversion(in_features=AltStreets_Layer_Dissolve1_L, out_path=Model_Outputs_gdb, out_name="AltStreets_Final", where_clause="AltStreets_Layer_Dissolve.ST_NAME <> Streets_table.ST_NAME", field_mapping=Field_Map_2_, config_keyword="")[0]
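# Optional sanity check on the split above (removed_count and kept_count
# are hypothetical names): every joined record should land in exactly one
# of the two outputs, except rows where ST_NAME is NULL on either side of
# the join -- NULL compares as unknown to both = and <>, so such rows end
# up in neither.
removed_count = int(arcpy.GetCount_management(Removed_AltStreets)[0])
kept_count = int(arcpy.GetCount_management(AltStreets_Final)[0])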
# Process: Merge (2) (Merge)
Basemap_shp = fr"{Project_Folder}\Basemap.shp"
arcpy.Merge_management(inputs=[Streets_Layer_24_, AltStreets_Final], output=Basemap_shp, field_mappings="REF_ZLEV \"REF_ZLEV\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,REF_ZLEV,-1,-1,Streets_Layer,REF_ZLEV,-1,-1,Streets_Layer,REF_ZLEV,-1,-1,Streets_Layer,REF_ZLEV,-1,-1;NREF_ZLEV \"NREF_ZLEV\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NREF_ZLEV,-1,-1,Streets_Layer,NREF_ZLEV,-1,-1,Streets_Layer,NREF_ZLEV,-1,-1,Streets_Layer,NREF_ZLEV,-1,-1;PlaceCodeL \"PlaceCodeL\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceCodeL,-1,-1,Streets_Layer,PlaceCodeL,-1,-1,Streets_Layer,PlaceCodeL,-1,-1,Streets_Layer,PlaceCodeL,-1,-1;PlaceCodeR \"PlaceCodeR\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceCodeR,-1,-1,Streets_Layer,PlaceCodeR,-1,-1,Streets_Layer,PlaceCodeR,-1,-1,Streets_Layer,PlaceCodeR,-1,-1;PlaceNamL \"PlaceNamL\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceNamL,0,255,Streets_Layer,PlaceNamL,0,255,Streets_Layer,PlaceNamL,0,255,Streets_Layer,PlaceNamL,0,255;PlaceNamR \"PlaceNamR\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceNamR,0,255,Streets_Layer,PlaceNamR,0,255,Streets_Layer,PlaceNamR,0,255,Streets_Layer,PlaceNamR,0,255;StateCodeL \"StateCodeL\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateCodeL,-1,-1,Streets_Layer,StateCodeL,-1,-1,Streets_Layer,StateCodeL,-1,-1,Streets_Layer,StateCodeL,-1,-1;StateCodeR \"StateCodeR\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateCodeR,-1,-1,Streets_Layer,StateCodeR,-1,-1,Streets_Layer,StateCodeR,-1,-1,Streets_Layer,StateCodeR,-1,-1;StateAbbrL \"StateAbbrL\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateAbbrL,0,2,Streets_Layer,StateAbbrL,0,255,Streets_Layer,StateAbbrL,0,255,Streets_Layer,StateAbbrL,0,255;StateAbbrR \"StateAbbrR\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateAbbrR,0,2,Streets_Layer,StateAbbrR,0,255,Streets_Layer,StateAbbrR,0,255,Streets_Layer,StateAbbrR,0,255;OneWay \"OneWay\" true true false 0 Short 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,OneWay,0,1,Streets_Layer,OneWay,-1,-1,Streets_Layer,OneWay,-1,-1,Streets_Layer,OneWay,-1,-1;Speed \"Speed\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,Speed,-1,-1,Streets_Layer,Speed,-1,-1,Streets_Layer,Speed,-1,-1,Streets_Layer,Speed,-1,-1;CFCC \"CFCC\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,CFCC,0,4,Streets_Layer,CFCC,0,255,Streets_Layer,CFCC,0,255,Streets_Layer,CFCC,0,255;M_LINK_ID \"M_LINK_ID\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,M_LINK_ID,-1,-1,Streets_Layer,M_LINK_ID,-1,-1,Streets_Layer,M_LINK_ID,-1,-1,Streets_Layer,M_LINK_ID,-1,-1;OLD_LINK_I \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,OLD_LINK_ID,-1,-1;ADDR_TYPE \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ADDR_TYPE,0,1,Streets_Layer,ADDR_TYPE,0,1,Streets_Layer,ADDR_TYPE,0,1,Streets_Layer,ADDR_TYPE,0,1;DIR_TRAVEL \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIR_TRAVEL,0,1,Streets_Layer,DIR_TRAVEL,0,1,Streets_Layer,DIR_TRAVEL,0,1,Streets_Layer,DIR_TRAVEL,0,1;DIRONSIGN \"DIRONSIGN\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIRONSIGN,0,1,Streets_Layer,DIRONSIGN,0,1,Streets_Layer,DIRONSIGN,0,1,Streets_Layer,DIRONSIGN,0,1;DIVIDER \"DIVIDER\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIVIDER,0,1,Streets_Layer,DIVIDER,0,1,Streets_Layer,DIVIDER,0,1,Streets_Layer,DIVIDER,0,1;Dom \"Dom\" true true false 2 Short 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,Dom,-1,-1;ENH_GEOM \"ENH_GEOM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ENH_GEOM,0,1,Streets_Layer,ENH_GEOM,0,1,Streets_Layer,ENH_GEOM,0,1,Streets_Layer,ENH_GEOM,0,1;EXITNAME \"EXITNAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,EXITNAME,0,1,Streets_Layer,EXITNAME,0,1,Streets_Layer,EXITNAME,0,1,Streets_Layer,EXITNAME,0,1;EXPLICATBL \"EXPLICATBL\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,EXPLICATBL,0,1,Streets_Layer,EXPLICATBL,0,1,Streets_Layer,EXPLICATBL,0,1,Streets_Layer,EXPLICATBL,0,1;FEAT_ID \"FEAT_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FEAT_ID,-1,-1,Streets_Layer,FEAT_ID,-1,-1,Streets_Layer,FEAT_ID,-1,-1,Streets_Layer,FEAT_ID,-1,-1;FR_SPD_LIM \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,FR_SPD_LIM,-1,-1,Streets_Layer,FR_SPD_LIM,-1,-1,Streets_Layer,FR_SPD_LIM,-1,-1,Streets_Layer,FR_SPD_LIM,-1,-1;FROM_LANES \"FROM_LANES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FROM_LANES,-1,-1,Streets_Layer,FROM_LANES,-1,-1,Streets_Layer,FROM_LANES,-1,-1,Streets_Layer,FROM_LANES,-1,-1;FUNC_CLASS \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FUNC_CLASS,0,1,Streets_Layer,FUNC_CLASS,0,1,Streets_Layer,FUNC_CLASS,0,1,Streets_Layer,FUNC_CLASS,0,1;JUNCTIONNM \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,JUNCTIONNM,0,1,Streets_Layer,JUNCTIONNM,0,1,Streets_Layer,JUNCTIONNM,0,1,Streets_Layer,JUNCTIONNM,0,1;L_ADDRFORM \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_ADDRFORM,0,2,Streets_Layer,L_ADDRFORM,0,2,Streets_Layer,L_ADDRFORM,0,2,Streets_Layer,L_ADDRFORM,0,2;L_ADDRSCH \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_ADDRSCH,0,1,Streets_Layer,L_ADDRSCH,0,1,Streets_Layer,L_ADDRSCH,0,1,Streets_Layer,L_ADDRSCH,0,1;L_AREA_ID \"L_AREA_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_AREA_ID,-1,-1,Streets_Layer,L_AREA_ID,-1,-1,Streets_Layer,L_AREA_ID,-1,-1,Streets_Layer,L_AREA_ID,-1,-1;L_NREFADDR \"L_NREFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_NREFADDR,0,10,Streets_Layer,L_NREFADDR,0,10,Streets_Layer,L_NREFADDR,0,10,Streets_Layer,L_NREFADDR,0,10;L_NUMZONES \"L_NUMZONES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_NUMZONES,-1,-1,Streets_Layer,L_NUMZONES,-1,-1,Streets_Layer,L_NUMZONES,-1,-1,Streets_Layer,L_NUMZONES,-1,-1;L_POSTCODE \"L_POSTCODE\" true true false 11 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_POSTCODE,0,11,Streets_Layer,L_POSTCODE,0,11,Streets_Layer,L_POSTCODE,0,11,Streets_Layer,L_POSTCODE,0,11;L_REFADDR \"L_REFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_REFADDR,0,10,Streets_Layer,L_REFADDR,0,10,Streets_Layer,L_REFADDR,0,10,Streets_Layer,L_REFADDR,0,10;LANE_CAT \"LANE_CAT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,LANE_CAT,0,1,Streets_Layer,LANE_CAT,0,1,Streets_Layer,LANE_CAT,0,1,Streets_Layer,LANE_CAT,0,1;LINK_ID \"LINK_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,LINK_ID,-1,-1,Streets_Layer,LINK_ID,-1,-1,Streets_Layer,LINK_ID,-1,-1,Streets_Layer,LINK_ID,-1,-1;N_SHAPEPNT \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,N_SHAPEPNT,-1,-1,Streets_Layer,N_SHAPEPNT,-1,-1,Streets_Layer,N_SHAPEPNT,-1,-1,Streets_Layer,N_SHAPEPNT,-1,-1;NAMEONRDSN \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NAMEONRDSN,0,1,Streets_Layer,NAMEONRDSN,0,1,Streets_Layer,NAMEONRDSN,0,1,Streets_Layer,NAMEONRDSN,0,1;NREF_IN_ID \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NREF_IN_ID,-1,-1,Streets_Layer,NREF_IN_ID,-1,-1,Streets_Layer,NREF_IN_ID,-1,-1,Streets_Layer,NREF_IN_ID,-1,-1;NUM_AD_RNG \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NUM_AD_RNG,-1,-1,Streets_Layer,NUM_AD_RNG,-1,-1,Streets_Layer,NUM_AD_RNG,-1,-1,Streets_Layer,NUM_AD_RNG,-1,-1;POSTALNAME \"POSTALNAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,POSTALNAME,0,1,Streets_Layer,POSTALNAME,0,1,Streets_Layer,POSTALNAME,0,1,Streets_Layer,POSTALNAME,0,1;R_ADDRFORM \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_ADDRFORM,0,2,Streets_Layer,R_ADDRFORM,0,2,Streets_Layer,R_ADDRFORM,0,2,Streets_Layer,R_ADDRFORM,0,2;R_ADDRSCH \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_ADDRSCH,0,1,Streets_Layer,R_ADDRSCH,0,1,Streets_Layer,R_ADDRSCH,0,1,Streets_Layer,R_ADDRSCH,0,1;R_AREA_ID \"R_AREA_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_AREA_ID,-1,-1,Streets_Layer,R_AREA_ID,-1,-1,Streets_Layer,R_AREA_ID,-1,-1,Streets_Layer,R_AREA_ID,-1,-1;R_NREFADDR \"R_NREFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_NREFADDR,0,10,Streets_Layer,R_NREFADDR,0,10,Streets_Layer,R_NREFADDR,0,10,Streets_Layer,R_NREFADDR,0,10;R_NUMZONES \"R_NUMZONES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_NUMZONES,-1,-1,Streets_Layer,R_NUMZONES,-1,-1,Streets_Layer,R_NUMZONES,-1,-1,Streets_Layer,R_NUMZONES,-1,-1;R_POSTCODE \"R_POSTCODE\" true true false 11 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_POSTCODE,0,11,Streets_Layer,R_POSTCODE,0,11,Streets_Layer,R_POSTCODE,0,11,Streets_Layer,R_POSTCODE,0,11;R_REFADDR \"R_REFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,R_REFADDR,0,10,Streets_Layer,R_REFADDR,0,10,Streets_Layer,R_REFADDR,0,10,Streets_Layer,R_REFADDR,0,10;RAMP \"RAMP\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,RAMP,0,1,Streets_Layer,RAMP,0,1,Streets_Layer,RAMP,0,1,Streets_Layer,RAMP,0,1;REF_IN_ID \"REF_IN_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,REF_IN_ID,-1,-1,Streets_Layer,REF_IN_ID,-1,-1,Streets_Layer,REF_IN_ID,-1,-1,Streets_Layer,REF_IN_ID,-1,-1;ROUTE_TYPE \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ROUTE_TYPE,0,1,Streets_Layer,ROUTE_TYPE,0,1,Streets_Layer,ROUTE_TYPE,0,1,Streets_Layer,ROUTE_TYPE,0,1;SCENIC_NM \"SCENIC_NM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SCENIC_NM,0,1,Streets_Layer,SCENIC_NM,0,1,Streets_Layer,SCENIC_NM,0,1,Streets_Layer,SCENIC_NM,0,1;SPEED_CAT \"SPEED_CAT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SPEED_CAT,0,1,Streets_Layer,SPEED_CAT,0,1,Streets_Layer,SPEED_CAT,0,1,Streets_Layer,SPEED_CAT,0,1;ST_LANGCD \"ST_LANGCD\" true true false 3 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_LANGCD,0,3,Streets_Layer,ST_LANGCD,0,3,Streets_Layer,ST_LANGCD,0,3,Streets_Layer,ST_LANGCD,0,3;ST_NAME \"ST_NAME\" true true false 240 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NAME,0,240,Streets_Layer,ST_NAME,0,120,Streets_Layer,ST_NAME,0,120,Streets_Layer,ST_NAME,0,120;ST_NM_BASE \"ST_NM_BASE\" true true false 105 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NM_BASE,0,105,Streets_Layer,ST_NM_BASE,0,70,Streets_Layer,ST_NM_BASE,0,70,Streets_Layer,ST_NM_BASE,0,70;ST_NM_PREF \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NM_PREF,0,6,Streets_Layer,ST_NM_PREF,0,6,Streets_Layer,ST_NM_PREF,0,6,Streets_Layer,ST_NM_PREF,0,6;ST_NM_SUFF \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NM_SUFF,0,6,Streets_Layer,ST_NM_SUFF,0,6,Streets_Layer,ST_NM_SUFF,0,6,Streets_Layer,ST_NM_SUFF,0,6;ST_TYP_AFT \"ST_TYP_AFT\" true true false 90 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_TYP_AFT,0,90,Streets_Layer,ST_TYP_AFT,0,50,Streets_Layer,ST_TYP_AFT,0,50,Streets_Layer,ST_TYP_AFT,0,50;ST_TYP_ATT \"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_TYP_ATT,0,1,Streets_Layer,ST_TYP_ATT,0,1,Streets_Layer,ST_TYP_ATT,0,1,Streets_Layer,ST_TYP_ATT,0,1;ST_TYP_BEF 
\"ST_TYP_BEF\" true true false 90 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_TYP_BEF,0,90,Streets_Layer,ST_TYP_BEF,0,50,Streets_Layer,ST_TYP_BEF,0,50,Streets_Layer,ST_TYP_BEF,0,50;STALENAME \"STALENAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,STALENAME,0,1,Streets_Layer,STALENAME,0,1,Streets_Layer,STALENAME,0,1,Streets_Layer,STALENAME,0,1;TO_LANES \"TO_LANES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,TO_LANES,-1,-1,Streets_Layer,TO_LANES,-1,-1,Streets_Layer,TO_LANES,-1,-1,Streets_Layer,TO_LANES,-1,-1;TO_SPD_LIM \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,TO_SPD_LIM,-1,-1,Streets_Layer,TO_SPD_LIM,-1,-1,Streets_Layer,TO_SPD_LIM,-1,-1,Streets_Layer,TO_SPD_LIM,-1,-1;VANITYNAME \"VANITYNAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,VANITYNAME,0,1,Streets_Layer,VANITYNAME,0,1,Streets_Layer,VANITYNAME,0,1,Streets_Layer,VANITYNAME,0,1;LINK_ID_1 \"LINK_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,LINK_ID_1,-1,-1;ST_NAME_1 \"ST_NAME\" true true false 120 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NAME_1,0,120;FEAT_ID_1 \"FEAT_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FEAT_ID_1,-1,-1;ST_LANGCD_ \"ST_LANGCD\" true true false 3 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_LANGCD_1,0,3;NUM_STNMES \"NUM_STNMES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NUM_STNMES,-1,-1,Streets_Layer,NUM_STNMES,-1,-1,Streets_Layer,NUM_STNMES,-1,-1,Streets_Layer,NUM_STNMES,-1,-1;ST_NM_PR_1 \"ST_NM_PREF\" true true false 6 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NM_PREF_1,0,6;ST_TYP_B_1 \"ST_TYP_BEF\" true true false 50 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_TYP_BEF_1,0,50;ST_NM_BA_1 \"ST_NM_BASE\" true true false 70 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NM_BASE_1,0,70;ST_NM_SU_1 \"ST_NM_SUFF\" true true false 6 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_NM_SUFF_1,0,6;ST_TYP_A_1 \"ST_TYP_AFT\" true true false 50 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_TYP_AFT_1,0,50;ST_TYP_A_2 
\"ST_TYP_ATT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ST_TYP_ATT_1,0,1;ADDR_TYPE_ \"ADDR_TYPE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ADDR_TYPE_1,0,1;L_REFADDR_ \"L_REFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_REFADDR_1,0,10;L_NREFAD_1 \"L_NREFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_NREFADDR_1,0,10;L_ADDRSCH_ \"L_ADDRSCH\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_ADDRSCH_1,0,1;L_ADDRFO_1 \"L_ADDRFORM\" true true false 2 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_ADDRFORM_1,0,2;R_REFADDR_ \"R_REFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_REFADDR_1,0,10;R_NREFAD_1 \"R_NREFADDR\" true true false 10 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_NREFADDR_1,0,10;R_ADDRSCH_ \"R_ADDRSCH\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_ADDRSCH_1,0,1;R_ADDRFO_1 \"R_ADDRFORM\" true true false 2 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_ADDRFORM_1,0,2;REF_IN_ID_ \"REF_IN_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,REF_IN_ID_1,-1,-1;NREF_IN__1 \"NREF_IN_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NREF_IN_ID_1,-1,-1;N_SHAPEP_1 \"N_SHAPEPNT\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,N_SHAPEPNT_1,-1,-1;FUNC_CLA_1 \"FUNC_CLASS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FUNC_CLASS_1,0,1;SPEED_CAT_ \"SPEED_CAT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SPEED_CAT_1,0,1;FR_SPD_L_1 \"FR_SPD_LIM\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FR_SPD_LIM_1,-1,-1;TO_SPD_L_1 \"TO_SPD_LIM\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,TO_SPD_LIM_1,-1,-1;TO_LANES_1 \"TO_LANES\" true true false 8 Double 0 
0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,TO_LANES_1,-1,-1;FROM_LAN_1 \"FROM_LANES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FROM_LANES_1,-1,-1;ENH_GEOM_1 \"ENH_GEOM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ENH_GEOM_1,0,1;LANE_CAT_1 \"LANE_CAT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,LANE_CAT_1,0,1;DIVIDER_1 \"DIVIDER\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIVIDER_1,0,1;DIR_TRAV_1 \"DIR_TRAVEL\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIR_TRAVEL_1,0,1;L_AREA_ID_ \"L_AREA_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_AREA_ID_1,-1,-1;R_AREA_ID_ \"R_AREA_ID\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_AREA_ID_1,-1,-1;L_POSTCO_1 \"L_POSTCODE\" true true false 11 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_POSTCODE_1,0,11;R_POSTCO_1 \"R_POSTCODE\" true true false 11 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_POSTCODE_1,0,11;L_NUMZON_1 \"L_NUMZONES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,L_NUMZONES_1,-1,-1;R_NUMZON_1 \"R_NUMZONES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,R_NUMZONES_1,-1,-1;NUM_AD_R_1 \"NUM_AD_RNG\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NUM_AD_RNG_1,-1,-1;AR_AUTO \"AR_AUTO\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_AUTO,0,1,Streets_Layer,AR_AUTO,0,1,Streets_Layer,AR_AUTO,0,1,Streets_Layer,AR_AUTO,0,1;AR_BUS \"AR_BUS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_BUS,0,1,Streets_Layer,AR_BUS,0,1,Streets_Layer,AR_BUS,0,1,Streets_Layer,AR_BUS,0,1;AR_TAXIS \"AR_TAXIS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_TAXIS,0,1,Streets_Layer,AR_TAXIS,0,1,Streets_Layer,AR_TAXIS,0,1,Streets_Layer,AR_TAXIS,0,1;AR_CARPOOL \"AR_CARPOOL\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,AR_CARPOOL,0,1,Streets_Layer,AR_CARPOOL,0,1,Streets_Layer,AR_CARPOOL,0,1,Streets_Layer,AR_CARPOOL,0,1;AR_PEDEST \"AR_PEDEST\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_PEDEST,0,1,Streets_Layer,AR_PEDEST,0,1,Streets_Layer,AR_PEDEST,0,1,Streets_Layer,AR_PEDEST,0,1;AR_TRUCKS \"AR_TRUCKS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_TRUCKS,0,1,Streets_Layer,AR_TRUCKS,0,1,Streets_Layer,AR_TRUCKS,0,1,Streets_Layer,AR_TRUCKS,0,1;AR_TRAFF \"AR_TRAFF\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_TRAFF,0,1,Streets_Layer,AR_TRAFF,0,1,Streets_Layer,AR_TRAFF,0,1,Streets_Layer,AR_TRAFF,0,1;AR_DELIV \"AR_DELIV\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_DELIV,0,1,Streets_Layer,AR_DELIV,0,1,Streets_Layer,AR_DELIV,0,1,Streets_Layer,AR_DELIV,0,1;AR_EMERVEH \"AR_EMERVEH\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_EMERVEH,0,1,Streets_Layer,AR_EMERVEH,0,1,Streets_Layer,AR_EMERVEH,0,1,Streets_Layer,AR_EMERVEH,0,1;AR_MOTOR \"AR_MOTOR\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,AR_MOTOR,0,1,Streets_Layer,AR_MOTOR,0,1,Streets_Layer,AR_MOTOR,0,1,Streets_Layer,AR_MOTOR,0,1;PAVED \"PAVED\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PAVED,0,1,Streets_Layer,PAVED,0,1,Streets_Layer,PAVED,0,1,Streets_Layer,PAVED,0,1;PRIVATE \"PRIVATE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PRIVATE,0,1,Streets_Layer,PRIVATE,0,1,Streets_Layer,PRIVATE,0,1,Streets_Layer,PRIVATE,0,1;FRONTAGE \"FRONTAGE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FRONTAGE,0,1,Streets_Layer,FRONTAGE,0,1,Streets_Layer,FRONTAGE,0,1,Streets_Layer,FRONTAGE,0,1;BRIDGE \"BRIDGE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,BRIDGE,0,1,Streets_Layer,BRIDGE,0,1,Streets_Layer,BRIDGE,0,1,Streets_Layer,BRIDGE,0,1;TUNNEL \"TUNNEL\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,TUNNEL,0,1,Streets_Layer,TUNNEL,0,1,Streets_Layer,TUNNEL,0,1,Streets_Layer,TUNNEL,0,1;RAMP_1 \"RAMP\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,RAMP_1,0,1;TOLLWAY \"TOLLWAY\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,TOLLWAY,0,1,Streets_Layer,TOLLWAY,0,1,Streets_Layer,TOLLWAY,0,1,Streets_Layer,TOLLWAY,0,1;POIACCESS \"POIACCESS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,POIACCESS,0,1,Streets_Layer,POIACCESS,0,1,Streets_Layer,POIACCESS,0,1,Streets_Layer,POIACCESS,0,1;CONTRACC \"CONTRACC\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,CONTRACC,0,1,Streets_Layer,CONTRACC,0,1,Streets_Layer,CONTRACC,0,1,Streets_Layer,CONTRACC,0,1;ROUNDABOUT \"ROUNDABOUT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ROUNDABOUT,0,1,Streets_Layer,ROUNDABOUT,0,1,Streets_Layer,ROUNDABOUT,0,1,Streets_Layer,ROUNDABOUT,0,1;INTERINTER \"INTERINTER\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,INTERINTER,0,1,Streets_Layer,INTERINTER,0,1,Streets_Layer,INTERINTER,0,1,Streets_Layer,INTERINTER,0,1;UNDEFTRAFF \"UNDEFTRAFF\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,UNDEFTRAFF,0,1,Streets_Layer,UNDEFTRAFF,0,1,Streets_Layer,UNDEFTRAFF,0,1,Streets_Layer,UNDEFTRAFF,0,1;FERRY_TYPE \"FERRY_TYPE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FERRY_TYPE,0,1,Streets_Layer,FERRY_TYPE,0,1,Streets_Layer,FERRY_TYPE,0,1,Streets_Layer,FERRY_TYPE,0,1;MULTIDIGIT \"MULTIDIGIT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,MULTIDIGIT,0,1,Streets_Layer,MULTIDIGIT,0,1,Streets_Layer,MULTIDIGIT,0,1,Streets_Layer,MULTIDIGIT,0,1;MAXATTR \"MAXATTR\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,MAXATTR,0,1,Streets_Layer,MAXATTR,0,1,Streets_Layer,MAXATTR,0,1,Streets_Layer,MAXATTR,0,1;SPECTRFIG \"SPECTRFIG\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SPECTRFIG,0,1,Streets_Layer,SPECTRFIG,0,1,Streets_Layer,SPECTRFIG,0,1,Streets_Layer,SPECTRFIG,0,1;INDESCRIB \"INDESCRIB\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,INDESCRIB,0,1,Streets_Layer,INDESCRIB,0,1,Streets_Layer,INDESCRIB,0,1,Streets_Layer,INDESCRIB,0,1;MANOEUVRE \"MANOEUVRE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,MANOEUVRE,0,1,Streets_Layer,MANOEUVRE,0,1,Streets_Layer,MANOEUVRE,0,1,Streets_Layer,MANOEUVRE,0,1;DIVIDERLEG \"DIVIDERLEG\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIVIDERLEG,0,1,Streets_Layer,DIVIDERLEG,0,1,Streets_Layer,DIVIDERLEG,0,1,Streets_Layer,DIVIDERLEG,0,1;INPROCDATA \"INPROCDATA\" true true false 1 
Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,INPROCDATA,0,1,Streets_Layer,INPROCDATA,0,1,Streets_Layer,INPROCDATA,0,1,Streets_Layer,INPROCDATA,0,1;FULL_GEOM \"FULL_GEOM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FULL_GEOM,0,1,Streets_Layer,FULL_GEOM,0,1,Streets_Layer,FULL_GEOM,0,1,Streets_Layer,FULL_GEOM,0,1;URBAN \"URBAN\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,URBAN,0,1,Streets_Layer,URBAN,0,1,Streets_Layer,URBAN,0,1,Streets_Layer,URBAN,0,1;ROUTE_TY_1 \"ROUTE_TYPE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,ROUTE_TYPE_1,0,1;DIRONSIGN_ \"DIRONSIGN\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,DIRONSIGN_1,0,1;EXPLICAT_1 \"EXPLICATBL\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,EXPLICATBL_1,0,1;NAMEONRD_1 \"NAMEONRDSN\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NAMEONRDSN_1,0,1;POSTALNA_1 \"POSTALNAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,POSTALNAME_1,0,1;STALENAME_ \"STALENAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,STALENAME_1,0,1;VANITYNA_1 \"VANITYNAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,VANITYNAME_1,0,1;JUNCTION_1 \"JUNCTIONNM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,JUNCTIONNM_1,0,1;EXITNAME_1 \"EXITNAME\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,EXITNAME_1,0,1;SCENIC_RT \"SCENIC_RT\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SCENIC_RT,0,1,Streets_Layer,SCENIC_RT,0,1,Streets_Layer,SCENIC_RT,0,1,Streets_Layer,SCENIC_RT,0,1;SCENIC_NM_ \"SCENIC_NM\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SCENIC_NM_1,0,1;FOURWHLDR \"FOURWHLDR\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,FOURWHLDR,0,1,Streets_Layer,FOURWHLDR,0,1,Streets_Layer,FOURWHLDR,0,1,Streets_Layer,FOURWHLDR,0,1;COVERIND \"COVERIND\" true true false 2 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,COVERIND,0,2,Streets_Layer,COVERIND,0,2,Streets_Layer,COVERIND,0,2,Streets_Layer,COVERIND,0,2;PLOT_ROAD \"PLOT_ROAD\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PLOT_ROAD,0,1,Streets_Layer,PLOT_ROAD,0,1,Streets_Layer,PLOT_ROAD,0,1,Streets_Layer,PLOT_ROAD,0,1;REVERSIBLE \"REVERSIBLE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,REVERSIBLE,0,1,Streets_Layer,REVERSIBLE,0,1,Streets_Layer,REVERSIBLE,0,1,Streets_Layer,REVERSIBLE,0,1;EXPR_LANE \"EXPR_LANE\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,EXPR_LANE,0,1,Streets_Layer,EXPR_LANE,0,1,Streets_Layer,EXPR_LANE,0,1,Streets_Layer,EXPR_LANE,0,1;CARPOOLRD \"CARPOOLRD\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,CARPOOLRD,0,1,Streets_Layer,CARPOOLRD,0,1,Streets_Layer,CARPOOLRD,0,1,Streets_Layer,CARPOOLRD,0,1;PHYS_LANES \"PHYS_LANES\" true true false 8 Double 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PHYS_LANES,-1,-1,Streets_Layer,PHYS_LANES,-1,-1,Streets_Layer,PHYS_LANES,-1,-1,Streets_Layer,PHYS_LANES,-1,-1;VER_TRANS \"VER_TRANS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,VER_TRANS,0,1,Streets_Layer,VER_TRANS,0,1,Streets_Layer,VER_TRANS,0,1,Streets_Layer,VER_TRANS,0,1;PUB_ACCESS \"PUB_ACCESS\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PUB_ACCESS,0,1,Streets_Layer,PUB_ACCESS,0,1,Streets_Layer,PUB_ACCESS,0,1,Streets_Layer,PUB_ACCESS,0,1;LOW_MBLTY \"LOW_MBLTY\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,LOW_MBLTY,0,1,Streets_Layer,LOW_MBLTY,0,1,Streets_Layer,LOW_MBLTY,0,1,Streets_Layer,LOW_MBLTY,0,1;PRIORITYRD \"PRIORITYRD\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PRIORITYRD,0,1,Streets_Layer,PRIORITYRD,0,1,Streets_Layer,PRIORITYRD,0,1,Streets_Layer,PRIORITYRD,0,1;SPD_LM_SRC \"SPD_LM_SRC\" true true false 2 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,SPD_LM_SRC,0,2,Streets_Layer,SPD_LM_SRC,0,2,Streets_Layer,SPD_LM_SRC,0,2,Streets_Layer,SPD_LM_SRC,0,2;EXPAND_INC \"EXPAND_INC\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,EXPAND_INC,0,1,Streets_Layer,EXPAND_INC,0,1,Streets_Layer,EXPAND_INC,0,1,Streets_Layer,EXPAND_INC,0,1;TRANS_AREA \"TRANS_AREA\" true true false 1 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model 
Outputs.gdb\\AltStreets_Final,TRANS_AREA,0,1,Streets_Layer,TRANS_AREA,0,1,Streets_Layer,TRANS_AREA,0,1,Streets_Layer,TRANS_AREA,0,1;REF_ZLEV_1 \"REF_ZLEV\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,REF_ZLEV_1,-1,-1;NREF_ZLEV_ \"NREF_ZLEV\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,NREF_ZLEV_1,-1,-1;PlaceCod_1 \"PlaceCodeL\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceCodeL_1,-1,-1;PlaceCod_2 \"PlaceCodeR\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceCodeR_1,-1,-1;PlaceNamL_ \"PlaceNamL\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceNamL_1,0,255;PlaceNamR_ \"PlaceNamR\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,PlaceNamR_1,0,255;StateCod_1 \"StateCodeL\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateCodeL_1,-1,-1;StateCod_2 \"StateCodeR\" true true false 0 Long 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateCodeR_1,-1,-1;StateAbb_1 \"StateAbbrL\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateAbbrL_1,0,255;StateAbb_2 \"StateAbbrR\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,StateAbbrR_1,0,255;OneWay_1 \"OneWay\" true true false 0 Short 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,OneWay_1,-1,-1;Speed_1 \"Speed\" true true false 0 Short 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,Speed_1,-1,-1;CFCC_1 \"CFCC\" true true false 255 Text 0 0,First,#,C:\\Users\\wkjenkins\\Documents\\local_gis\\mapmaker\\MapMaker-Conversion-2019\\190605\\Model Outputs.gdb\\AltStreets_Final,CFCC_1,0,255;InLine_FID \"InLine_FID\" true true false 4 Long 0 0,First,#,Streets_Layer,InLine_FID,-1,-1,Streets_Layer,InLine_FID,-1,-1,Streets_Layer,InLine_FID,-1,-1;SimLnFlag \"SimLnFlag\" true true false 2 Short 0 0,First,#,Streets_Layer,SimLnFlag,-1,-1,Streets_Layer,SimLnFlag,-1,-1,Streets_Layer,SimLnFlag,-1,-1;MaxSimpTol \"MaxSimpTol\" true true false 8 Double 0 0,First,#,Streets_Layer,MaxSimpTol,-1,-1,Streets_Layer,MaxSimpTol,-1,-1,Streets_Layer,MaxSimpTol,-1,-1;MinSimpTol \"MinSimpTol\" true true false 8 Double 0 0,First,#,Streets_Layer,MinSimpTol,-1,-1,Streets_Layer,MinSimpTol,-1,-1,Streets_Layer,MinSimpTol,-1,-1;OLD_LINK_1 \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,Streets_Layer,OLD_LINK_ID,-1,-1;OLD_LINK_2 \"OLD_LINK_ID\" true true false 0 Long 0 
0,First,#,Streets_Layer,OLD_LINK_ID,-1,-1;OLD_LINK_3 \"OLD_LINK_ID\" true true false 0 Long 0 0,First,#,Streets_Layer,OLD_LINK_ID,-1,-1", add_source="NO_SOURCE_INFO")
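# The merge target is a shapefile, so output field names are capped at 10
# characters; that is why the mapping above carries truncated names such
# as OLD_LINK_I for OLD_LINK_ID.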
# Process: Sort (Sort)
Basemap_Sort_190828 = fr"{Model_Outputs_gdb}\Basemap_Sort_190828"
arcpy.Sort_management(in_dataset=Basemap_shp, out_dataset=Basemap_Sort_190828, sort_field=[["REF_ZLEV", "DESCENDING"]], spatial_sort_method="UR")
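# Note: spatial_sort_method only takes effect when Shape is among the sort
# fields, so with REF_ZLEV as the sole sort key the "UR" setting should
# have no effect here.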
# Process: Calculate Field (Calculate Field)
Basemap_Sort_190828_4_ = arcpy.CalculateField_management(in_table=Basemap_Sort_190828, field="ST_NAME", expression="ifBlock(!ST_NAME!)", expression_type="PYTHON3", code_block="""def ifBlock(name):
    if name == ' ':
        return 'UNNAMED ROAD'
    else:
        return name
""", field_type="TEXT")[0]
# Process: Calculate Field (2) (Calculate Field)
Basemap_Sort_190828_2_ = arcpy.CalculateField_management(in_table=Basemap_Sort_190828_4_, field="CFCC", expression="ifBlock(!CFCC!,!ST_NAME!)", expression_type="PYTHON3", code_block="""interstates = ['I-64', 'I-44', 'I-55', 'I-170']

def inter(names, val):
    # True when val matches any entry in names.
    return val in names

def ifBlock(c, name):
    if c == 'A40':
        return 'A30'
    elif inter(interstates, name):
        return 'A10'
    else:
        return c""", field_type="TEXT")[0]
# Process: Calculate Field (3) (Calculate Field)
Streets_Layer_9_ = arcpy.CalculateField_management(in_table=Streets_Layer_8_, field="Streets.PlaceCodeR", expression="!Adminbndy4.POLYGON_NM!", expression_type="PYTHON3", code_block="", field_type="TEXT")[0]
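# The qualified names above (Streets.PlaceCodeR, Adminbndy4.POLYGON_NM)
# imply that Streets_Layer_8_ carries an active join to Adminbndy4,
# presumably added in an earlier step of the model.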
if __name__ == '__main__':
    # Global Environment settings
    with arcpy.EnvManager(scratchWorkspace=r"C:\Users\wkjenkins\Documents\local_gis\mapmaker\MM190531\MyProject10\MyProject10.gdb", workspace=r"C:\Users\wkjenkins\Documents\local_gis\mapmaker\MM190531\MyProject10\MyProject10.gdb"):
        Model5(*argv[1:])
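# Hypothetical invocation (the argument list depends on Model5's signature,
# defined earlier in this script):
#   python this_script.py <workspace_or_other_params...>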
| 552.634981 | 45,587 | 0.79236 | 24,979 | 145,343 | 4.354618 | 0.015973 | 0.063986 | 0.055988 | 0.063103 | 0.934672 | 0.925322 | 0.919558 | 0.905244 | 0.903175 | 0.895242 | 0 | 0.062049 | 0.068459 | 145,343 | 262 | 45,588 | 554.744275 | 0.741346 | 0.022216 | 0 | 0.240876 | 1 | 6.058394 | 0.849348 | 0.620053 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.014599 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
|
c923ff3eef99c3d760e7b19a35c9198141714c37
| 163
|
py
|
Python
|
pybrain/rl/environments/__init__.py
|
sveilleux1/pybrain
|
1e1de73142c290edb84e29ca7850835f3e7bca8b
|
[
"BSD-3-Clause"
] | 2,208
|
2015-01-02T02:14:41.000Z
|
2022-03-31T04:45:46.000Z
|
pybrain/rl/environments/__init__.py
|
sveilleux1/pybrain
|
1e1de73142c290edb84e29ca7850835f3e7bca8b
|
[
"BSD-3-Clause"
] | 91
|
2015-01-08T16:42:16.000Z
|
2021-12-11T19:16:35.000Z
|
pybrain/rl/environments/__init__.py
|
sveilleux1/pybrain
|
1e1de73142c290edb84e29ca7850835f3e7bca8b
|
[
"BSD-3-Clause"
] | 786
|
2015-01-02T15:18:20.000Z
|
2022-02-23T23:42:40.000Z
|
from pybrain.rl.environments.environment import Environment
from pybrain.rl.environments.task import Task
from pybrain.rl.environments.episodic import EpisodicTask
| 54.333333
| 59
| 0.877301
| 21
| 163
| 6.809524
| 0.428571
| 0.230769
| 0.272727
| 0.524476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067485
| 163
| 3
| 60
| 54.333333
| 0.940789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c9610b208ec4ba0c9816896a0f4571e50054ee67
| 1,891
|
py
|
Python
|
api/src/dto/EmissionDto.py
|
SamuelJansen/queue-manager-api
|
53e95eacb90025e2464b9d32fe0e37df4f344d18
|
[
"MIT"
] | null | null | null |
api/src/dto/EmissionDto.py
|
SamuelJansen/queue-manager-api
|
53e95eacb90025e2464b9d32fe0e37df4f344d18
|
[
"MIT"
] | null | null | null |
api/src/dto/EmissionDto.py
|
SamuelJansen/queue-manager-api
|
53e95eacb90025e2464b9d32fe0e37df4f344d18
|
[
"MIT"
] | null | null | null |
from python_framework import ConverterStatic
from constant import EmissionConstant

class EmissionRequestDto:
    def __init__(self,
            queueKey = None,
            subscriptionKey = None,
            url = None,
            tries = None,
            onErrorUrl = None,
            onErrorTries = None,
            maxTries = None,
            backOff = None,
            message = None
        ):
        self.queueKey = queueKey
        self.subscriptionKey = subscriptionKey
        self.url = url
        self.tries = ConverterStatic.getValueOrDefault(tries, EmissionConstant.ZERO_TRIES)
        self.onErrorUrl = onErrorUrl
        self.onErrorTries = ConverterStatic.getValueOrDefault(onErrorTries, EmissionConstant.ZERO_TRIES)
        self.maxTries = ConverterStatic.getValueOrDefault(maxTries, EmissionConstant.DEFAULT_MAX_TRIES)
        self.backOff = ConverterStatic.getValueOrDefault(backOff, EmissionConstant.DEFAULT_BACKOFF)
        self.message = message

class EmissionResponseDto:
    def __init__(self,
            queueKey = None,
            subscriptionKey = None,
            url = None,
            tries = None,
            onErrorUrl = None,
            onErrorTries = None,
            maxTries = None,
            backOff = None,
            message = None,
            history = None
        ):
        self.queueKey = queueKey
        self.subscriptionKey = subscriptionKey
        self.url = url
        self.tries = ConverterStatic.getValueOrDefault(tries, EmissionConstant.ZERO_TRIES)
        self.onErrorUrl = onErrorUrl
        self.onErrorTries = ConverterStatic.getValueOrDefault(onErrorTries, EmissionConstant.ZERO_TRIES)
        self.maxTries = ConverterStatic.getValueOrDefault(maxTries, EmissionConstant.DEFAULT_MAX_TRIES)
        self.backOff = ConverterStatic.getValueOrDefault(backOff, EmissionConstant.DEFAULT_BACKOFF)
        self.message = message
        self.history = ConverterStatic.getValueOrDefault(history, [])
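# Both DTOs lean on a single defaulting helper. A self-contained sketch of
# that pattern in plain Python (ConverterStatic and EmissionConstant are
# external to this file, so the constants below are illustrative stand-ins):
ZERO_TRIES = 0
DEFAULT_MAX_TRIES = 3
DEFAULT_BACKOFF = 1

def get_value_or_default(value, default):
    # None means "not provided"; anything else passes through unchanged.
    return default if value is None else value

assert get_value_or_default(None, ZERO_TRIES) == 0
assert get_value_or_default(10, DEFAULT_MAX_TRIES) == 10
assert get_value_or_default(None, DEFAULT_BACKOFF) == 1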
| 36.365385
| 104
| 0.684823
| 158
| 1,891
| 8.075949
| 0.177215
| 0.225705
| 0.07837
| 0.090909
| 0.860502
| 0.860502
| 0.860502
| 0.860502
| 0.860502
| 0.860502
| 0
| 0
| 0.24643
| 1,891
| 51
| 105
| 37.078431
| 0.895439
| 0
| 0
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.043478
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c97b215c4e0cbbd748b49783c3c0586e803f305f
| 6,040
|
py
|
Python
|
plots.py
|
nargesalavi/Variational_Quantum_Embedding
|
3f9f5093d2b54e96969a6de5f35661c4df55e59e
|
[
"MIT"
] | 1
|
2021-02-22T12:32:47.000Z
|
2021-02-22T12:32:47.000Z
|
plots.py
|
nargesalavi/Variational_Quantum_Embedding
|
3f9f5093d2b54e96969a6de5f35661c4df55e59e
|
[
"MIT"
] | null | null | null |
plots.py
|
nargesalavi/Variational_Quantum_Embedding
|
3f9f5093d2b54e96969a6de5f35661c4df55e59e
|
[
"MIT"
] | 1
|
2021-01-23T07:15:44.000Z
|
2021-01-23T07:15:44.000Z
|
import matplotlib.pyplot as plt
#import matplotlib.axes as axes
import numpy as np
import pandas
def plot_axes_IdVsCost():
    #axes.Axis.set_axisbelow(True)
    x = np.array([1,2,3,4,5,6,7,8])
    my_xticks = ['1','2','3','4','5','6','7','8']
    plt.xticks(x, my_xticks)
    # for L=1,Nq=1,d=1
    # for L=1,Nq=2,d=1
    # for L=1,Nq=3,d=1
    # for L=1,Nq=4,d=1
    y = np.array([0.207044,np.nan,np.nan,0.206619,np.nan,np.nan,np.nan,np.nan])
    plt.scatter(x, y, marker='^',color='blue',label='L=1,Nq=4,d=1')
    # for l=2,Nq=1,d=1
    # for l=2,Nq=2,d=1
    # for l=2,Nq=3,d=1
    y = np.array([0.376935,np.nan,0.326575,0.182479,np.nan,np.nan,np.nan,np.nan])
    plt.scatter(x, y, marker='o',color='red',label='L=2,Nq=3,d=1')
    # for l=2,Nq=4,d=1
    y = np.array([0.400412,np.nan,np.nan,np.nan,0.593843,0.722007,np.nan,np.nan])
    plt.scatter(x, y, marker='o',color='blue',label='L=2,Nq=4,d=1')
    # for l=3
    # for l=4,Nq=1,d=1
    y = np.array([0.116092,0.103657,0.312526,np.nan,np.nan,np.nan,np.nan,np.nan])
    plt.scatter(x, y, marker='s',color='purple',label='L=4,Nq=1,d=1')
    # for l=4,Nq=2,d=1
    y = np.array([np.nan,np.nan,0.375075,0.325434,np.nan,np.nan,0.398591,0.660803])
    plt.scatter(x, y, marker='s',color='green',label='L=4,Nq=2,d=1')
    # for l=4,Nq=3,d=1
    # for l=4,Nq=4,d=1
    # for l=1,Nq=1..3,d=2
    # for l=1,Nq=4,d=2
    y = np.array([np.nan,np.nan,np.nan,np.nan,0.748411,np.nan,np.nan,np.nan])
    plt.scatter(x, y,marker='^',facecolors='none',edgecolors='blue',label='L=1,Nq=4,d=2')
    # for l=2,Nq=1,d=2
    # for l=2,Nq=2,d=2
    y = np.array([np.nan,np.nan,np.nan,np.nan,np.nan,np.nan,0.270515,0.92881])
    plt.scatter(x, y,marker='o',facecolors='none',edgecolors='green',label='L=2,Nq=2,d=2')
    # for l=2,Nq=3,d=2
    # for l=2,Nq=4,d=2
    y = np.array([np.nan,np.nan,np.nan,np.nan,0.719350,np.nan,np.nan,0.568995])
    plt.scatter(x, y,marker='o',facecolors='none',edgecolors='blue',label='L=2,Nq=4,d=2')
    # for l=3
    # for l=4,Nq=2,d=2
    y = np.array([0.482175,np.nan,np.nan,np.nan,np.nan,np.nan,0.469099,0.398838])
    plt.scatter(x, y,marker='s',facecolors='none',edgecolors='green',label='L=4,Nq=2,d=2')
    plt.grid(b=True, which='both', color='#666666', linestyle='--')
    plt.legend(bbox_to_anchor=(1.001, 1), loc='upper left')
    plt.xlabel("Circuit ID", fontsize=13)
    plt.ylabel("Cost Function After 300 Steps", fontsize=13)
    plt.show()
def plot_axes_LayersVsCost():
    #ID == marker, Nq==color, d==filled/nonfilled
    #x = np.array([1,2,3,4])
    x = np.array([1,2,3,4])
    #my_xticks = ['1','2','3','4']
    #plt.xticks(x, my_xticks)
    # for ID=1,Nq=1,d=1
    y = np.array([np.nan,np.nan,np.nan,0.116092])
    plt.scatter(x, y, marker='^',color='blue',label='ID=1,Nq=1,d=1')
    # for ID=1,Nq=2,d=1
    # for ID=1,Nq=3,d=1
    y = np.array([np.nan,0.376935,np.nan,np.nan])
    plt.scatter(x, y, marker='^',color='red',label='ID=1,Nq=3,d=1')
    # for ID=1,Nq=4,d=1
    y = np.array([0.207044,0.400412,np.nan,np.nan])
    plt.scatter(x, y, marker='^',color='blue',label='ID=1,Nq=4,d=1')
    #___________________________________ID=2________________________________
    # for ID=2,Nq=1,d=1
    y = np.array([np.nan,np.nan,np.nan,0.103657])
    plt.scatter(x, y, marker='o',color='blue',label='ID=2,Nq=1,d=1')
    # for ID=2,Nq=2,d=1
    # for ID=2,Nq=3,d=1
    # for ID=2,Nq=4,d=1
    #___________________________________ID=3________________________________
    # for ID=3,Nq=1,d=1
    y = np.array([np.nan,np.nan,np.nan,0.312526])
    plt.scatter(x, y, marker='s',color='blue',label='ID=3,Nq=1,d=1')
    # for ID=3,Nq=2,d=1
    y = np.array([np.nan,np.nan,np.nan,0.375075])
    plt.scatter(x, y, marker='s',color='green',label='ID=3,Nq=2,d=1')
    # for ID=3,Nq=3,d=1
    y = np.array([np.nan,0.326575,np.nan,np.nan])
    plt.scatter(x, y, marker='s',color='red',label='ID=3,Nq=3,d=1')
    plt.grid(b=True, which='both', color='#666666', linestyle='--')
    plt.legend(bbox_to_anchor=(1.001, 1), loc='upper left')
    plt.xlabel("Number of Layers (L)", fontsize=13)
    plt.ylabel("Cost Function After 300 Steps", fontsize=13)
    plt.show()
#plot_axes_IdVsCost()
#L,Nq,d,ID
data = np.full((4,4,2,8), np.nan)
data[0,3,0,0]=0.207044
data[0,3,0,3]=0.206619
data[1,2,0,0]=0.376935
data[1,2,0,2]=0.326575
data[1,2,0,3]=0.182479
data[1,3,0,0]=0.400412
data[1,3,0,4]=0.593843
data[1,3,0,5]=0.722007
data[3,0,0,0]=0.116092
data[3,0,0,1]=0.103657
data[3,0,0,2]=0.312526
data[3,1,0,2]=0.375075
data[3,1,0,3]=0.325434
data[3,1,0,6]=0.398591
data[3,1,0,7]=0.660803
data[0,3,1,4]=0.748411
data[1,1,1,6]=0.270515
data[1,1,1,7]=0.92881
data[1,3,1,4]=0.719350
data[1,3,1,7]=0.568995
data[3,1,1,0]=0.482175
data[3,1,1,6]=0.469099
data[3,1,1,7]=0.398838
print(data)
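# A quick way to interrogate the (L, Nq, d, ID) grid above without re-plotting:
# scan each setting and report the best (lowest) cost over the circuit IDs.
# This is only an exploratory sketch over the `data` array already built here.
for L in range(4):
    for Nq in range(4):
        for d in range(2):
            vals = data[L, Nq, d]
            if not np.all(np.isnan(vals)):
                print(f"L={L+1}, Nq={Nq+1}, d={d+1}: best cost {np.nanmin(vals):.6f}")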
| 34.318182
| 87
| 0.637914
| 1,436
| 6,040
| 2.579387
| 0.071727
| 0.152538
| 0.141739
| 0.202484
| 0.829374
| 0.824244
| 0.782397
| 0.766739
| 0.75621
| 0.739471
| 0
| 0.162368
| 0.088411
| 6,040
| 175
| 88
| 34.514286
| 0.510352
| 0.189901
| 0
| 0.478261
| 0
| 0
| 0.121375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.032609
| 0
| 0.054348
| 0.01087
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3575a76bf4f22f4e90530ba3c00ccf8fddcbf02
| 47,102
|
py
|
Python
|
model.py
|
jiang28/Real-Estate-Hotspot-Prediction
|
d1f231e994f785f1176ff4e4a903077392436279
|
[
"MIT"
] | 1
|
2021-10-29T18:09:34.000Z
|
2021-10-29T18:09:34.000Z
|
model.py
|
jiang28/Real-Estate-Hotspot-Prediction
|
d1f231e994f785f1176ff4e4a903077392436279
|
[
"MIT"
] | null | null | null |
model.py
|
jiang28/Real-Estate-Hotspot-Prediction
|
d1f231e994f785f1176ff4e4a903077392436279
|
[
"MIT"
] | 1
|
2021-06-11T14:06:59.000Z
|
2021-06-11T14:06:59.000Z
|
# -*- coding: utf-8 -*-
"""Copy of transformerRE.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1YxbGe-rpt3B3eq635NxMd3YzcMGwtnBx
"""
from google.colab import drive
drive.mount('/content/drive')
PATH = '/content/drive/My Drive/'
device = 'cuda'
import torch
if torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'
print(device)
#load data
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from pandas import read_csv
output_window = 1
def get_data():
    data = []
    features = []
    location = []
    label = []
    series = read_csv(PATH +'DATA/data_census_update.csv', sep=',', header=None, low_memory=False, keep_default_na=False).to_numpy()
    for i in range(len(series)):
        if i == 0:
            continue
        # if '' in series[i]:
        #     continue
        line = series[i][2:-11]
        line = [float(_) if _ != '' else -1 for _ in line]
        line = [int(j) for j in line]
        data.append(line)
        f = series[i][-11:-1]
        f = [float(_) if _ != '' else -1 for _ in f]
        f = [int(_) for _ in f]
        features.append(f)
        label.append(int(float(series[i][-1])))
    data = np.asarray(data)
    features = np.asarray(features)
    label = np.asarray(label)
    data_ = []
    features_ = []
    for i in range(len(data)):
        scaler = MinMaxScaler(feature_range=(0, 1))
        x = data[i]
        x = scaler.fit_transform(np.array(x).reshape(-1, 1)).reshape(-1)
        data_.append(x)
        features_.append(scaler.fit_transform(np.array(features[i]).reshape(-1, 1)).reshape(-1))
    input_data = torch.FloatTensor(data_)
    label = torch.FloatTensor(label)
    features = torch.FloatTensor(features_)
    samples = int(len(input_data)*0.7)
    x_train = input_data[:samples]
    x_test = input_data[samples:]
    y_train = label[:samples]
    y_test = label[samples:]
    ftr_train = features[:samples]
    ftr_test = features[samples:]
    return x_train, y_train, ftr_train, x_test, y_test, ftr_test
if __name__ == '__main__':
    x_train, y_train, ftr_train, x_test, y_test, ftr_test = get_data()
    print(x_train.shape)
    print(y_train.shape)
    print(ftr_train.shape)
    print(x_test.shape)
    print(y_test.shape)
    print(ftr_test.shape)
    pass
##Transformer
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import argparse
import sys
import math
class PositionalEncoding(nn.Module):
    def __init__(self, d_model, max_len=5000,**block_args):
        super(PositionalEncoding, self).__init__()
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        pe = pe.unsqueeze(0).transpose(0, 1)
        pe.requires_grad = True
        self.register_buffer('pe', pe)
        #self.layers = nn.ModuleList([EncoderBlock(**block_args) for _ in range(num_layers)])
    def forward(self, x):
        return x + self.pe[:x.size(0), :]
    def get_attention_maps(self, x, mask=None):
        attention_maps = []
        for l in self.layers:
            _, attn_map = l.self_attn(x, mask=mask, return_attention=True)
            attention_maps.append(attn_map)
            x = l(x)
        return attention_maps
class Transformer(nn.Module):
    def __init__(self, input_size, feature_size=250,num_layers=1,dropout=0.1):
        super(Transformer, self).__init__()
        self.src_mask = None
        self.pos_encoder = PositionalEncoding(feature_size)
        self.encoder_layer = nn.TransformerEncoderLayer(d_model=feature_size, nhead=10, dropout=dropout)
        self.transformer_encoder = nn.TransformerEncoder(self.encoder_layer, num_layers=num_layers)
        self.layer = nn.Linear(feature_size,1)
        self.layer1 = nn.Linear(input_size, 200)
        self.layer2 = nn.Linear(200, 100)
        self.layer3 = nn.Linear(100, 1)
        self.last_layer = nn.Sigmoid()
    def forward(self, src, ftr):
        if self.src_mask is None or self.src_mask.size(0) != len(src):
            mask = self._generate_square_subsequent_mask(len(src)).to(device)
            self.src_mask = mask
        src = self.pos_encoder(src)
        hidden = self.transformer_encoder(src,self.src_mask)
        ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
        ftr = ftr.expand(ftr.shape[0], ftr.shape[1], hidden.shape[2])
        hidden = torch.cat((hidden, ftr), 0)
        output = self.layer(hidden)
        output = torch.squeeze(output)
        output = output.transpose(0, 1)
        output = self.layer1(output)
        output = self.layer2(output)
        output = self.layer3(output)
        output = self.last_layer(output)
        return output
    def _generate_square_subsequent_mask(self, sz):
        mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
        mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
        return mask
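# The mask builder above is the standard causal (look-ahead) mask: position i
# may only attend to positions <= i. A tiny standalone check, using only torch:
sz = 4
m = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
m = m.float().masked_fill(m == 0, float('-inf')).masked_fill(m == 1, float(0.0))
print(m)
# tensor([[0., -inf, -inf, -inf],
#         [0., 0., -inf, -inf],
#         [0., 0., 0., -inf],
#         [0., 0., 0., 0.]])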
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--mode', choices=['train', 'infer'],\
        default='train',help='Run mode')
    arg_parser.add_argument('--epoch', default='30', type=int)
    arg_parser.add_argument('--batch_size', default='32', type=int)
    args = arg_parser.parse_args(args=['--mode', 'train'])
    args = arg_parser.parse_args(args=['--epoch', '30'])
    args = arg_parser.parse_args(args=['--batch_size', '32'])
    model_path = PATH + 'MODEL/model_all_transformer_12.pt'
    X_train, y_train, FTR_train, X_test, y_test, FTR_test = get_data()
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    model = Transformer(input_size = X_train.shape[1] + FTR_train.shape[1])
    loss_function = nn.MSELoss()
    #map = model.get_attention_maps(X_train)
    #print(map)
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.98)
    num_batch = int(len(y_train)/args.batch_size) + 1
    num_batch_test = int(len(y_test)/args.batch_size) + 1
    last_acc = 0.0
    if args.mode == 'train':
        for epoch in range(args.epoch):
            acc = []
            total_loss = 0
            acc_test = []
            model.to(device)
            for i in range(num_batch):
                sys.stdout.write('\r{0}/{1}'.format(i, num_batch))
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_train))
                x = X_train[st:ed]
                x = x.transpose(0, 1)
                x = torch.reshape(x, (x.shape[0], x.shape[1], 1))
                x = x.to(device)
                ftr = FTR_train[st:ed]
                ftr = ftr.transpose(0, 1)
                ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
                ftr = ftr.to(device)
                label = y_train[st:ed]
                label = label.to(device)
                model.zero_grad()
                output = model(x, ftr)
                predict = torch.squeeze(output)
                loss = loss_function(predict, label)
                acc.append(torch.sum(predict.gt(0.5) == label))
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
            for i in range(num_batch_test):
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_test))
                x_test = X_test[st:ed]
                x_test = x_test.transpose(0, 1)
                x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
                x_test = x_test.to(device)
                ftr_test = FTR_test[st:ed]
                ftr_test = ftr_test.transpose(0, 1)
                ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
                ftr_test = ftr_test.to(device)
                label_test = y_test[st:ed]
                label_test = label_test.to(device)
                output_test = model(x_test, ftr_test)
                predict_test = torch.squeeze(output_test)
                acc_test.append(torch.sum(predict_test.gt(0.5) == label_test))
            total_loss /= len(y_train)
            acc = sum(acc)*1.0/len(y_train)
            acc_test = sum(acc_test)*1.0/len(y_test)
            if epoch%10 == 0 or epoch == args.epoch - 1:
                print('\nEpoch: ', epoch)
                print('\nTraining set: Loss {0:.4f}. Acc {1:.4f}.\nTest set: Acc {2:.4f}.'.format(total_loss, acc, acc_test))
            scheduler.step()
            if acc_test > last_acc:
                torch.save(model.cpu(), model_path)
                last_acc = acc_test
# pip install scikit-plot  (Colab shell cell; run as "!pip install scikit-plot" in a notebook)
#Transformer evaluation
from sklearn import metrics
from sklearn.metrics import classification_report, confusion_matrix
model_path = PATH + 'MODEL/model_all_transformer_12.pt'
model = torch.load(model_path)
model.to(device)
acc_test = []
y_pred = []
for i in range(num_batch_test):
    st = i * args.batch_size
    ed = min((i+1) * args.batch_size, len(y_test))
    x_test = X_test[st:ed]
    x_test = x_test.transpose(0, 1)
    x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    x_test = x_test.to(device)
    ftr_test = FTR_test[st:ed]
    ftr_test = ftr_test.transpose(0, 1)
    ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
    ftr_test = ftr_test.to(device)
    label_test = y_test[st:ed]
    label_test = label_test.to(device)
    output_test = model(x_test, ftr_test)
    predict_test = torch.squeeze(output_test)
    y_pred += predict_test.gt(0.5)
y_pred = list(map(float, y_pred))
y_pred = np.asarray(y_pred)
y_pred = torch.FloatTensor(y_pred)
print('Transformer Results:')
print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(metrics.roc_auc_score(y_test, y_pred))
# calculate the fpr and tpr for all thresholds of the classification
fpr, tpr, threshold = metrics.roc_curve(y_test, y_pred)
roc_auc = metrics.auc(fpr, tpr)
# method I: plt
import matplotlib.pyplot as plt
plt.title('Receiver Operating Characteristic')
plt.plot(fpr, tpr, 'b', label = 'AUC = %0.2f' % roc_auc)
plt.legend(loc = 'lower right')
plt.plot([0, 1], [0, 1],'r--',label='Sample Label Red')
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')
plt.savefig(PATH+"roc.png")
#MLP baseline
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import argparse
import sys
import math
class MLP(nn.Module):
    def __init__(self, input_size):
        super(MLP, self).__init__()
        self.layer1 = nn.Linear(input_size, 200)
        self.layer2 = nn.Linear(200, 100)
        self.layer3 = nn.Linear(100, 1)
        self.last_layer = nn.Sigmoid()
    def forward(self, src, ftr):
        ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
        ftr = ftr.expand(ftr.shape[0], ftr.shape[1], src.shape[2])
        hidden = torch.cat((src, ftr), 0)
        output = torch.squeeze(hidden)
        output = output.transpose(0, 1)
        output = self.layer1(output)
        output = self.layer2(output)
        output = self.layer3(output)
        output = self.last_layer(output)
        return output
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--mode', choices=['train', 'infer'],\
        default='train',help='Run mode')
    arg_parser.add_argument('--epoch', default='30', type=int)
    arg_parser.add_argument('--batch_size', default='32', type=int)
    args = arg_parser.parse_args(args=['--mode', 'train'])
    args = arg_parser.parse_args(args=['--epoch', '30'])
    args = arg_parser.parse_args(args=['--batch_size', '32'])
    model_path_mlp = PATH + 'MODEL/model_mlp_temporal.pt'
    X_train, y_train, FTR_train, X_test, y_test, FTR_test = get_data()
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    model = MLP(input_size = X_train.shape[1] + FTR_train.shape[1])
    loss_function = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.98)
    num_batch = int(len(y_train)/args.batch_size) + 1
    num_batch_test = int(len(y_test)/args.batch_size) + 1
    last_acc = 0.0
    if args.mode == 'train':
        for epoch in range(args.epoch):
            acc = []
            total_loss = 0
            acc_test = []
            model.to(device)
            for i in range(num_batch):
                sys.stdout.write('\r{0}/{1}'.format(i, num_batch))
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_train))
                x = X_train[st:ed]
                x = x.transpose(0, 1)
                x = torch.reshape(x, (x.shape[0], x.shape[1], 1))
                x = x.to(device)
                ftr = FTR_train[st:ed]
                ftr = ftr.transpose(0, 1)
                ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
                ftr = ftr.to(device)
                label = y_train[st:ed]
                label = label.to(device)
                model.zero_grad()
                output = model(x, ftr)
                predict = torch.squeeze(output)
                loss = loss_function(predict, label)
                acc.append(torch.sum(predict.gt(0.5) == label))
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
            for i in range(num_batch_test):
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_test))
                x_test = X_test[st:ed]
                x_test = x_test.transpose(0, 1)
                x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
                x_test = x_test.to(device)
                ftr_test = FTR_test[st:ed]
                ftr_test = ftr_test.transpose(0, 1)
                ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
                ftr_test = ftr_test.to(device)
                label_test = y_test[st:ed]
                label_test = label_test.to(device)
                output_test = model(x_test, ftr_test)
                predict_test = torch.squeeze(output_test)
                acc_test.append(torch.sum(predict_test.gt(0.5) == label_test))
            total_loss /= len(y_train)
            acc = sum(acc)*1.0/len(y_train)
            acc_test = sum(acc_test)*1.0/len(y_test)
            if epoch%10 == 0 or epoch == args.epoch - 1:
                print('\nEpoch: ', epoch)
                print('\nTraining set: Loss {0:.4f}. Acc {1:.4f}.\nTest set: Acc {2:.4f}.'.format(total_loss, acc, acc_test))
            scheduler.step()
            if acc_test > last_acc:
                torch.save(model.cpu(), model_path_mlp)
                last_acc = acc_test
#MLP evaluation
from sklearn import metrics
from sklearn.metrics import classification_report, confusion_matrix
model = torch.load(model_path_mlp)
model.to(device)
acc_test = []
y_pred = []
for i in range(num_batch_test):
    st = i * args.batch_size
    ed = min((i+1) * args.batch_size, len(y_test))
    x_test = X_test[st:ed]
    x_test = x_test.transpose(0, 1)
    x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    x_test = x_test.to(device)
    ftr_test = FTR_test[st:ed]
    ftr_test = ftr_test.transpose(0, 1)
    ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
    ftr_test = ftr_test.to(device)
    label_test = y_test[st:ed]
    label_test = label_test.to(device)
    output_test = model(x_test, ftr_test)
    predict_test = torch.squeeze(output_test)
    y_pred += predict_test.gt(0.5)
y_pred = list(map(float, y_pred))
y_pred = np.asarray(y_pred)
y_pred = torch.FloatTensor(y_pred)
print('MLP Results:')
print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(metrics.roc_auc_score(y_test, y_pred))
#LSTM baseline
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import argparse
import sys
import math
class LSTM(nn.Module):
    def __init__(self, input_size, feature_size=250,num_layers=1,dropout=0.1):
        super(LSTM, self).__init__()
        self.layer1 = nn.LSTM(input_size, 50)
        self.layer2 = nn.Linear(50, 1)
        self.last_layer = nn.Sigmoid()
    def forward(self, src, ftr):
        ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
        ftr = ftr.expand(ftr.shape[0], ftr.shape[1], src.shape[2])
        hidden = torch.cat((src, ftr), 0)
        hidden = hidden.transpose(1, 2)
        hidden = hidden.transpose(0, 2)
        hidden, (hn, cn) = self.layer1(hidden)
        output = self.layer2(hidden)
        output = self.last_layer(output)
        return output
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--mode', choices=['train', 'infer'],\
        default='train',help='Run mode')
    arg_parser.add_argument('--epoch', default='30', type=int)
    arg_parser.add_argument('--batch_size', default='32', type=int)
    args = arg_parser.parse_args(args=['--mode', 'train'])
    args = arg_parser.parse_args(args=['--epoch', '30'])
    args = arg_parser.parse_args(args=['--batch_size', '32'])
    model_path_lstm = PATH + 'MODEL/model_lstm.pt'
    X_train, y_train, FTR_train, X_test, y_test, FTR_test = get_data()
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    model = LSTM(input_size = X_train.shape[1] + FTR_train.shape[1])
    loss_function = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.98)
    num_batch = int(len(y_train)/args.batch_size) + 1
    num_batch_test = int(len(y_test)/args.batch_size) + 1
    last_acc = 0.0
    if args.mode == 'train':
        for epoch in range(args.epoch):
            acc = []
            total_loss = 0
            acc_test = []
            model.to(device)
            for i in range(num_batch):
                sys.stdout.write('\r{0}/{1}'.format(i, num_batch))
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_train))
                x = X_train[st:ed]
                x = x.transpose(0, 1)
                x = torch.reshape(x, (x.shape[0], x.shape[1], 1))
                x = x.to(device)
                ftr = FTR_train[st:ed]
                ftr = ftr.transpose(0, 1)
                ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
                ftr = ftr.to(device)
                label = y_train[st:ed]
                label = label.to(device)
                model.zero_grad()
                output = model(x, ftr)
                predict = torch.squeeze(output)
                loss = loss_function(predict, label)
                acc.append(torch.sum(predict.gt(0.5) == label))
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
            for i in range(num_batch_test):
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_test))
                x_test = X_test[st:ed]
                x_test = x_test.transpose(0, 1)
                x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
                x_test = x_test.to(device)
                ftr_test = FTR_test[st:ed]
                ftr_test = ftr_test.transpose(0, 1)
                ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
                ftr_test = ftr_test.to(device)
                label_test = y_test[st:ed]
                label_test = label_test.to(device)
                output_test = model(x_test, ftr_test)
                predict_test = torch.squeeze(output_test)
                acc_test.append(torch.sum(predict_test.gt(0.5) == label_test))
            total_loss /= len(y_train)
            acc = sum(acc)*1.0/len(y_train)
            acc_test = sum(acc_test)*1.0/len(y_test)
            if epoch%10 == 0 or epoch == args.epoch - 1:
                print('\nEpoch: ', epoch)
                print('\nTraining set: Loss {0:.4f}. Acc {1:.4f}.\nTest set: Acc {2:.4f}.'.format(total_loss, acc, acc_test))
            scheduler.step()
            if acc_test > last_acc:
                torch.save(model.cpu(), model_path_lstm)
                last_acc = acc_test
#LSTM evaluation
from sklearn import metrics
from sklearn.metrics import classification_report, confusion_matrix
model = torch.load(model_path_lstm)
model.to(device)
acc_test = []
y_pred = []
for i in range(num_batch_test):
    st = i * args.batch_size
    ed = min((i+1) * args.batch_size, len(y_test))
    x_test = X_test[st:ed]
    x_test = x_test.transpose(0, 1)
    x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    x_test = x_test.to(device)
    ftr_test = FTR_test[st:ed]
    ftr_test = ftr_test.transpose(0, 1)
    ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
    ftr_test = ftr_test.to(device)
    label_test = y_test[st:ed]
    label_test = label_test.to(device)
    output_test = model(x_test, ftr_test)
    predict_test = torch.squeeze(output_test)
    y_pred += predict_test.gt(0.5)
y_pred = list(map(float, y_pred))
y_pred = np.asarray(y_pred)
y_pred = torch.FloatTensor(y_pred)
print('LSTM Results:')
print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(metrics.roc_auc_score(y_test, y_pred))
#load data
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from pandas import read_csv
output_window = 1
def get_data():
    data = []
    features = []
    location = []
    label = []
    series = read_csv(PATH+'DATA/data_census_hotspot_2.csv', sep=',', header=None, low_memory=False, keep_default_na=False).to_numpy()
    for i in range(len(series)):
        if i == 0:
            continue
        # if '' in series[i]:
        #     continue
        line = series[i][2:-12]
        line = [float(_) if _ != '' else -1 for _ in line]
        line = [int(j) for j in line]
        data.append(line)
        f = series[i][-12:-2]
        f = [float(_) if _ != '' else -1 for _ in f]
        f = [int(_) for _ in f]
        features.append(f)
        idx = int(float(series[i][-1]))
        hotspot = [0.] * 4
        hotspot[idx] = 1.
        label.append(hotspot)
    data = np.asarray(data)
    features = np.asarray(features)
    label = np.asarray(label)
    data_ = []
    features_ = []
    for i in range(len(data)):
        scaler = MinMaxScaler(feature_range=(0, 1))
        x = data[i]
        x = scaler.fit_transform(np.array(x).reshape(-1, 1)).reshape(-1)
        data_.append(x)
        features_.append(scaler.fit_transform(np.array(features[i]).reshape(-1, 1)).reshape(-1))
    input_data = torch.FloatTensor(data_)
    label = torch.FloatTensor(label)
    features = torch.FloatTensor(features_)
    samples = int(len(input_data)*0.7)
    x_train = input_data[:samples]
    x_test = input_data[samples:]
    y_train = label[:samples]
    y_test = label[samples:]
    ftr_train = features[:samples]
    ftr_test = features[samples:]
    return x_train, y_train, ftr_train, x_test, y_test, ftr_test
if __name__ == '__main__':
    x_train, y_train, ftr_train, x_test, y_test, ftr_test = get_data()
    print(x_train.shape)
    print(y_train.shape)
    print(ftr_train.shape)
    print(x_test.shape)
    print(y_test.shape)
    print(ftr_test.shape)
    pass
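# The hotspot label built above is a 4-way one-hot vector; the same encoding
# can be produced directly with torch.nn.functional.one_hot (a sketch,
# assuming the class index is already an int in [0, 3]):
import torch.nn.functional as F
idx = torch.tensor([2])
print(F.one_hot(idx, num_classes=4).float())  # tensor([[0., 0., 1., 0.]])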
#LSTM city prediction
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import argparse
import sys
import math
class LSTM(nn.Module):
    def __init__(self, input_size, feature_size=250,num_layers=1,dropout=0.1):
        super(LSTM, self).__init__()
        self.layer1 = nn.LSTM(input_size, 50)
        self.layer2 = nn.Linear(50, 4)
        self.last_layer = nn.Sigmoid()
    def forward(self, src, ftr):
        ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
        ftr = ftr.expand(ftr.shape[0], ftr.shape[1], src.shape[2])
        hidden = torch.cat((src, ftr), 0)
        hidden = hidden.transpose(1, 2)
        hidden = hidden.transpose(0, 2)
        hidden, (hn, cn) = self.layer1(hidden)
        output = self.layer2(hidden)
        output = self.last_layer(output)
        return output
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--mode', choices=['train', 'infer'],\
        default='train',help='Run mode')
    arg_parser.add_argument('--epoch', default='100', type=int)
    arg_parser.add_argument('--batch_size', default='32', type=int)
    args = arg_parser.parse_args(args=['--mode', 'train'])
    args = arg_parser.parse_args(args=['--epoch', '100'])
    args = arg_parser.parse_args(args=['--batch_size', '32'])
    model_path_city_lstm = PATH + 'MODEL/model_lstm_city.pt'
    X_train, y_train, FTR_train, X_test, y_test, FTR_test = get_data()
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    model = LSTM(input_size = X_train.shape[1] + FTR_train.shape[1])
    loss_function = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.98)
    num_batch = int(len(y_train)/args.batch_size) + 1
    num_batch_test = int(len(y_test)/args.batch_size) + 1
    last_acc = 0.0
    if args.mode == 'train':
        for epoch in range(args.epoch):
            acc = []
            total_loss = 0
            acc_test = []
            model.to(device)
            for i in range(num_batch):
                sys.stdout.write('\r{0}/{1}'.format(i, num_batch))
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_train))
                x = X_train[st:ed]
                x = x.transpose(0, 1)
                x = torch.reshape(x, (x.shape[0], x.shape[1], 1))
                x = x.to(device)
                ftr = FTR_train[st:ed]
                ftr = ftr.transpose(0, 1)
                ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
                ftr = ftr.to(device)
                label = y_train[st:ed]
                label = label.to(device)
                model.zero_grad()
                output = model(x, ftr)
                output = output.transpose(1, 2)
                output = torch.squeeze(output)
                loss = loss_function(output, label)
                predict_max_idx = torch.max(output, 1)[1]
                label_max_idx = torch.max(label, 1)[1]
                acc.append(torch.sum(predict_max_idx == label_max_idx))
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
            for i in range(num_batch_test):
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_test))
                x_test = X_test[st:ed]
                x_test = x_test.transpose(0, 1)
                x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
                x_test = x_test.to(device)
                ftr_test = FTR_test[st:ed]
                ftr_test = ftr_test.transpose(0, 1)
                ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
                ftr_test = ftr_test.to(device)
                label_test = y_test[st:ed]
                label_test = label_test.to(device)
                output_test = model(x_test, ftr_test)
                output_test = output_test.transpose(1, 2)
                output_test = torch.squeeze(output_test)
                predict_max_idx_test = torch.max(output_test, 1)[1]
                label_max_idx_test = torch.max(label_test, 1)[1]
                acc_test.append(torch.sum(predict_max_idx_test == label_max_idx_test))
            total_loss /= len(y_train)
            acc = sum(acc)*1.0/len(y_train)
            acc_test = sum(acc_test)*1.0/len(y_test)
            if epoch%10 == 0 or epoch == args.epoch - 1:
                print('\nEpoch: ', epoch)
                print('\nTraining set: Loss {0:.4f}. Acc {1:.4f}.\nTest set: Acc {2:.4f}.'.format(total_loss, acc, acc_test))
            scheduler.step()
            if acc_test > last_acc:
                torch.save(model.cpu(), model_path_city_lstm)
                last_acc = acc_test
#City prediction evaluation LSTM
from sklearn import metrics
from sklearn.metrics import classification_report, confusion_matrix
model = torch.load(model_path_city_lstm)
model.to(device)
acc_test = []
y_pred = []
y_label = []
for i in range(num_batch_test):
    st = i * args.batch_size
    ed = min((i+1) * args.batch_size, len(y_test))
    x_test = X_test[st:ed]
    x_test = x_test.transpose(0, 1)
    x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    x_test = x_test.to(device)
    ftr_test = FTR_test[st:ed]
    ftr_test = ftr_test.transpose(0, 1)
    ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
    ftr_test = ftr_test.to(device)
    label_test = y_test[st:ed]
    label_test = label_test.to(device)
    output_test = model(x_test, ftr_test)
    output_test = output_test.transpose(1, 2)
    output_test = torch.squeeze(output_test)
    predict_max_idx_test = torch.max(output_test, 1)[1]
    label_max_idx_test = torch.max(label_test, 1)[1]
    y_pred += predict_max_idx_test
    y_label += label_max_idx_test
y_pred = list(map(float, y_pred))
y_pred = np.asarray(y_pred)
y_pred = torch.FloatTensor(y_pred)
y_label = list(map(float, y_label))
y_label = np.asarray(y_label)
y_label = torch.FloatTensor(y_label)
print('City Prediction LSTM Results:')
print("Accuracy:",metrics.accuracy_score(y_label, y_pred))
print(confusion_matrix(y_label, y_pred))
print(classification_report(y_label, y_pred))
#MLP city prediction baseline
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import argparse
import sys
import math
class MLP(nn.Module):
    def __init__(self, input_size):
        super(MLP, self).__init__()
        self.layer1 = nn.Linear(input_size, 200)
        self.layer2 = nn.Linear(200, 100)
        #four possible outcomes (hotspot classes)
        self.layer3 = nn.Linear(100, 4)
        self.last_layer = nn.Sigmoid()
    def forward(self, src, ftr):
        ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
        ftr = ftr.expand(ftr.shape[0], ftr.shape[1], src.shape[2])
        hidden = torch.cat((src, ftr), 0)
        output = torch.squeeze(hidden)
        output = output.transpose(0, 1)
        output = self.layer1(output)
        output = self.layer2(output)
        output = self.layer3(output)
        output = self.last_layer(output)
        return output
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--mode', choices=['train', 'infer'],\
        default='train',help='Run mode')
    arg_parser.add_argument('--epoch', default='30', type=int)
    arg_parser.add_argument('--batch_size', default='32', type=int)
    args = arg_parser.parse_args(args=['--mode', 'train'])
    args = arg_parser.parse_args(args=['--epoch', '30'])
    args = arg_parser.parse_args(args=['--batch_size', '32'])
    model_path_city = PATH + 'MODEL/model_mlp_city.pt'
    X_train, y_train, FTR_train, X_test, y_test, FTR_test = get_data()
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    model = MLP(input_size = X_train.shape[1] + FTR_train.shape[1])
    #model = MLP(input_size = X_train.shape[1])
    loss_function = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.98)
    num_batch = int(len(y_train)/args.batch_size) + 1
    num_batch_test = int(len(y_test)/args.batch_size) + 1
    last_acc = 0.0
    if args.mode == 'train':
        for epoch in range(args.epoch):
            acc = []
            total_loss = 0
            acc_test = []
            model.to(device)
            for i in range(num_batch):
                sys.stdout.write('\r{0}/{1}'.format(i, num_batch))
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_train))
                x = X_train[st:ed]
                x = x.transpose(0, 1)
                x = torch.reshape(x, (x.shape[0], x.shape[1], 1))
                x = x.to(device)
                ftr = FTR_train[st:ed]
                ftr = ftr.transpose(0, 1)
                ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
                ftr = ftr.to(device)
                label = y_train[st:ed]
                label = label.to(device)
                model.zero_grad()
                output = model(x, ftr)
                #predict = torch.squeeze(output)
                loss = loss_function(output, label)
                predict_max_idx = torch.max(output, 1)[1]
                label_max_idx = torch.max(label, 1)[1]
                print('label_max_idx: ',label_max_idx.shape)
                print('label_max_idx: ',label_max_idx)
                acc.append(torch.sum(predict_max_idx == label_max_idx))
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
            for i in range(num_batch_test):
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_test))
                x_test = X_test[st:ed]
                x_test = x_test.transpose(0, 1)
                x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
                x_test = x_test.to(device)
                ftr_test = FTR_test[st:ed]
                ftr_test = ftr_test.transpose(0, 1)
                ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
                ftr_test = ftr_test.to(device)
                label_test = y_test[st:ed]
                label_test = label_test.to(device)
                output_test = model(x_test, ftr_test)
                #predict_test = torch.squeeze(output_test)
                #print("predict test: ", predict_test)
                #print("label test: ", label_test)
                predict_max_idx_test = torch.max(output_test, 1)[1]
                label_max_idx_test = torch.max(label_test, 1)[1]
                #print(label_max_idx_test)
                acc_test.append(torch.sum(predict_max_idx_test == label_max_idx_test))
            total_loss /= len(y_train)
            acc = sum(acc)*1.0/len(y_train)
            acc_test = sum(acc_test)*1.0/len(y_test)
            #print(acc)
            #print(total_loss)
            if epoch%10 == 0 or epoch == args.epoch - 1:
                print('\nEpoch: ', epoch)
                print('\nTraining set: Loss {0:.4f}. Acc {1:.4f}.\nTest set: Acc {2:.4f}.'.format(total_loss, acc, acc_test))
            scheduler.step()
            if acc_test > last_acc:
                torch.save(model.cpu(), model_path_city)
                last_acc = acc_test
#MLP evaluation
from sklearn import metrics
from sklearn.metrics import classification_report, confusion_matrix
model = torch.load(model_path_city)
model.to(device)
acc_test = []
y_pred = []
y_label = []
for i in range(num_batch_test):
    st = i * args.batch_size
    ed = min((i+1) * args.batch_size, len(y_test))
    x_test = X_test[st:ed]
    x_test = x_test.transpose(0, 1)
    x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    x_test = x_test.to(device)
    ftr_test = FTR_test[st:ed]
    ftr_test = ftr_test.transpose(0, 1)
    ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
    ftr_test = ftr_test.to(device)
    label_test = y_test[st:ed]
    label_test = label_test.to(device)
    output_test = model(x_test, ftr_test)
    predict_max_idx_test = torch.max(output_test, 1)[1]
    label_max_idx_test = torch.max(label_test, 1)[1]
    y_pred += predict_max_idx_test
    y_label += label_max_idx_test
y_pred = list(map(float, y_pred))
y_pred = np.asarray(y_pred)
y_pred = torch.FloatTensor(y_pred)
y_label = list(map(float, y_label))
y_label = np.asarray(y_label)
y_label = torch.FloatTensor(y_label)
print('MLP City Prediction Results:')
print("Accuracy:",metrics.accuracy_score(y_label, y_pred))
print(confusion_matrix(y_label, y_pred))
print(classification_report(y_label, y_pred))
#print(metrics.roc_auc_score(y_test, y_pred))
#Transformer city prediction
from __future__ import print_function
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import argparse
import sys
import math
import torch.nn.functional as F  # used by scaled_dot_product below
class PositionalEncoding(nn.Module):
    def __init__(self, d_model, max_len=5000):
        super(PositionalEncoding, self).__init__()
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        pe = pe.unsqueeze(0).transpose(0, 1)
        pe.requires_grad = True
        self.register_buffer('pe', pe)
    def forward(self, x):
        return x + self.pe[:x.size(0), :]
class Transformer(nn.Module):
    def __init__(self, input_size, feature_size=250,num_layers=1,dropout=0.1):
        super(Transformer, self).__init__()
        self.src_mask = None
        self.pos_encoder = PositionalEncoding(feature_size)
        self.encoder_layer = nn.TransformerEncoderLayer(d_model=feature_size, nhead=10, dropout=dropout)
        self.transformer_encoder = nn.TransformerEncoder(self.encoder_layer, num_layers=num_layers)
        self.layer = nn.Linear(feature_size,1)
        #self.layer1 = nn.Linear(input_size, 200)
        self.layer1 = nn.Linear(input_size, 200)
        self.layer2 = nn.Linear(200, 100)
        self.layer3 = nn.Linear(100, 4)
        self.last_layer = nn.Sigmoid()
    def forward(self, src, ftr):
        if self.src_mask is None or self.src_mask.size(0) != len(src):
            mask = self._generate_square_subsequent_mask(len(src)).to(device)
            self.src_mask = mask
        src = self.pos_encoder(src)
        hidden = self.transformer_encoder(src,self.src_mask)
        ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
        ftr = ftr.expand(ftr.shape[0], ftr.shape[1], hidden.shape[2])
        hidden = torch.cat((hidden, ftr), 0)
        output = self.layer(hidden)
        output = torch.squeeze(output)
        output = output.transpose(0, 1)
        #print('shape5', src.shape)
        output = self.layer1(output)
        #print('shape5', src.shape)
        output = self.layer2(output)
        output = self.layer3(output)
        output = self.last_layer(output)
        return output
    def _generate_square_subsequent_mask(self, sz):
        mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
        mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
        return mask
#attention plot to get the dot product
def scaled_dot_product(q, k, v, mask=None):
    d_k = q.size()[-1]
    attn_logits = torch.matmul(q, k.transpose(-2, -1))
    attn_logits = attn_logits / math.sqrt(d_k)
    if mask is not None:
        attn_logits = attn_logits.masked_fill(mask == 0, -9e15)
    attention = F.softmax(attn_logits, dim=-1)
    values = torch.matmul(attention, v)
    return values, attention
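# Sanity check for the helper above: softmax over the key axis means each row
# of the attention matrix sums to 1. Shapes are illustrative only
# (batch=2, seq=5, d_k=8):
q = torch.randn(2, 5, 8)
k = torch.randn(2, 5, 8)
v = torch.randn(2, 5, 8)
values, attention = scaled_dot_product(q, k, v)
print(values.shape)           # torch.Size([2, 5, 8])
print(attention.sum(dim=-1))  # all ones (up to float error)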
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--mode', choices=['train', 'infer'],\
        default='train',help='Run mode')
    arg_parser.add_argument('--epoch', default='100', type=int)
    arg_parser.add_argument('--batch_size', default='32', type=int)
    args = arg_parser.parse_args(args=['--mode', 'train'])
    args = arg_parser.parse_args(args=['--epoch', '100'])
    args = arg_parser.parse_args(args=['--batch_size', '32'])
    model_path_city = PATH + 'MODEL/model_all_city_hotspot_3.pt'
    X_train, y_train, FTR_train, X_test, y_test, FTR_test = get_data()
    print(X_train.shape)
    print(y_train.shape)
    print(X_test.shape)
    print(y_test.shape)
    #choose temporal or census or both
    model = Transformer(input_size = X_train.shape[1] + FTR_train.shape[1])
    #model = Transformer(input_size = X_train.shape[1])
    loss_function = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 1.0, gamma=0.98)
    num_batch = int(len(y_train)/args.batch_size) + 1
    num_batch_test = int(len(y_test)/args.batch_size) + 1
    last_acc = 0.0
    if args.mode == 'train':
        for epoch in range(args.epoch):
            acc = []
            total_loss = 0
            acc_test = []
            model.to(device)
            for i in range(num_batch):
                sys.stdout.write('\r{0}/{1}'.format(i, num_batch))
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_train))
                x = X_train[st:ed]
                x = x.transpose(0, 1)
                x = torch.reshape(x, (x.shape[0], x.shape[1], 1))
                x = x.to(device)
                ftr = FTR_train[st:ed]
                ftr = ftr.transpose(0, 1)
                ftr = torch.reshape(ftr, (ftr.shape[0], ftr.shape[1], 1))
                ftr = ftr.to(device)
                label = y_train[st:ed]
                label = label.to(device)
                model.zero_grad()
                output = model(x, ftr)
                loss = loss_function(output, label)
                predict_max_idx = torch.max(output, 1)[1]
                label_max_idx = torch.max(label, 1)[1]
                acc.append(torch.sum(predict_max_idx == label_max_idx))
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
            for i in range(num_batch_test):
                st = i * args.batch_size
                ed = min((i+1) * args.batch_size, len(y_test))
                x_test = X_test[st:ed]
                x_test = x_test.transpose(0, 1)
                x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
                x_test = x_test.to(device)
                ftr_test = FTR_test[st:ed]
                ftr_test = ftr_test.transpose(0, 1)
                ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
                ftr_test = ftr_test.to(device)
                label_test = y_test[st:ed]
                label_test = label_test.to(device)
                output_test = model(x_test, ftr_test)
                predict_max_idx_test = torch.max(output_test, 1)[1]
                label_max_idx_test = torch.max(label_test, 1)[1]
                acc_test.append(torch.sum(predict_max_idx_test == label_max_idx_test))
            total_loss /= len(y_train)
            acc = sum(acc)*1.0/len(y_train)
            acc_test = sum(acc_test)*1.0/len(y_test)
            if epoch%10 == 0 or epoch == args.epoch - 1:
                print('\nEpoch: ', epoch)
                print('\nTraining set: Loss {0:.4f}. Acc {1:.4f}.\nTest set: Acc {2:.4f}.'.format(total_loss, acc, acc_test))
            scheduler.step()
            if acc_test > last_acc:
                torch.save(model.cpu(), model_path_city)
                last_acc = acc_test
#City prediction evaluation
from sklearn import metrics
from sklearn.metrics import classification_report, confusion_matrix
model = torch.load(model_path_city)
model.to(device)
acc_test = []
y_pred = []
y_label = []
for i in range(num_batch_test):
    st = i * args.batch_size
    ed = min((i+1) * args.batch_size, len(y_test))
    x_test = X_test[st:ed]
    x_test = x_test.transpose(0, 1)
    x_test = torch.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    x_test = x_test.to(device)
    ftr_test = FTR_test[st:ed]
    ftr_test = ftr_test.transpose(0, 1)
    ftr_test = torch.reshape(ftr_test, (ftr_test.shape[0], ftr_test.shape[1], 1))
    ftr_test = ftr_test.to(device)
    label_test = y_test[st:ed]
    label_test = label_test.to(device)
    output_test = model(x_test, ftr_test)
    predict_max_idx_test = torch.max(output_test, 1)[1]
    label_max_idx_test = torch.max(label_test, 1)[1]
    y_pred += predict_max_idx_test
    y_label += label_max_idx_test
y_pred = list(map(float, y_pred))
y_pred = np.asarray(y_pred)
y_pred = torch.FloatTensor(y_pred)
y_label = list(map(float, y_label))
y_label = np.asarray(y_label)
y_label = torch.FloatTensor(y_label)
print('City Prediction Results:')
print("Accuracy:",metrics.accuracy_score(y_label, y_pred))
print(confusion_matrix(y_label, y_pred))
print(classification_report(y_label, y_pred))
#skplt.metrics.plot_confusion_matrix(y_label, y_pred, normalize=True)
#plt.savefig(PATH+'test_metric.png', dpi=300)
| 31.54856
| 134
| 0.598446
| 6,744
| 47,102
| 3.947657
| 0.049377
| 0.028547
| 0.028922
| 0.01803
| 0.938136
| 0.932577
| 0.929197
| 0.925065
| 0.923938
| 0.921459
| 0
| 0.023849
| 0.270031
| 47,102
| 1,493
| 135
| 31.54856
| 0.750429
| 0.02501
| 0
| 0.931166
| 1
| 0.005736
| 0.038886
| 0.005033
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.001912
| 0.083174
| null | null | 0.080306
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6ea140305b6c93676a9f832138a7a9f40c838a04
| 31,845
|
py
|
Python
|
network/xception.py
|
AssassionXY/HOR
|
a4c91d90a59eb2b144d827afff626b7eac907320
|
[
"Apache-2.0"
] | null | null | null |
network/xception.py
|
AssassionXY/HOR
|
a4c91d90a59eb2b144d827afff626b7eac907320
|
[
"Apache-2.0"
] | null | null | null |
network/xception.py
|
AssassionXY/HOR
|
a4c91d90a59eb2b144d827afff626b7eac907320
|
[
"Apache-2.0"
] | null | null | null |
"""
Ported to pytorch thanks to [tstandley](https://github.com/tstandley/Xception-PyTorch)
@author: tstandley
Adapted by cadene
Creates an Xception Model as defined in:
Francois Chollet
Xception: Deep Learning with Depthwise Separable Convolutions
https://arxiv.org/pdf/1610.02357.pdf
These weights were ported from the Keras implementation and achieve the following performance on the validation set:
Loss:0.9173 Prec@1:78.892 Prec@5:94.292
REMEMBER to set your image size to 3x299x299 for both test and validation
normalize = transforms.Normalize(mean=[0.5, 0.5, 0.5],
                                 std=[0.5, 0.5, 0.5])
The resize parameter of the validation transform should be 333, and make sure to center crop at 299x299
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from torch.nn import init
from .bam import *
pretrained_settings = {
    'xception': {
        'imagenet': {
            'url': 'http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth',
            'input_space': 'RGB',
            'input_size': [3, 299, 299],
            'input_range': [0, 1],
            'mean': [0.5, 0.5, 0.5],
            'std': [0.5, 0.5, 0.5],
            'num_classes': 1000,
            'scale': 0.8975 # The resize parameter of the validation transform should be 333, and make sure to center crop at 299x299
        }
    }
}
class SeparableConv2d(nn.Module):
    def __init__(self,in_channels,out_channels,kernel_size=1,stride=1,padding=0,dilation=1,bias=False):
        super(SeparableConv2d,self).__init__()
        # Depthwise convolution (one filter per input channel) ...
        self.conv1 = nn.Conv2d(in_channels,in_channels,kernel_size,stride,padding,dilation,groups=in_channels,bias=bias)
        # ... followed by a 1x1 pointwise convolution that mixes channels.
        self.pointwise = nn.Conv2d(in_channels,out_channels,1,1,0,1,1,bias=bias)
    def forward(self,x):
        x = self.conv1(x)
        x = self.pointwise(x)
        return x
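# Why separable convolutions: for the same in/out channels and kernel size,
# the depthwise + pointwise pair needs far fewer weights than a full Conv2d.
# A quick self-contained comparison (illustrative sizes only):
sep = SeparableConv2d(128, 256, kernel_size=3, padding=1)
full = nn.Conv2d(128, 256, kernel_size=3, padding=1, bias=False)
count = lambda m: sum(p.numel() for p in m.parameters())
print(count(sep), count(full))  # 128*9 + 128*256 = 33920 vs 128*256*9 = 294912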
class Block(nn.Module):
    def __init__(self,in_filters,out_filters,reps,strides=1,start_with_relu=True,grow_first=True):
        super(Block, self).__init__()
        #self.bam = BAM(out_filters)
        if out_filters != in_filters or strides!=1:
            self.skip = nn.Conv2d(in_filters,out_filters,1,stride=strides, bias=False)
            self.skipbn = nn.BatchNorm2d(out_filters)
        else:
            self.skip=None
        self.relu = nn.ReLU(inplace=True)
        rep=[]
        filters=in_filters
        if grow_first:
            rep.append(self.relu)
            rep.append(SeparableConv2d(in_filters,out_filters,3,stride=1,padding=1,bias=False))
            rep.append(nn.BatchNorm2d(out_filters))
            filters = out_filters
        for i in range(reps-1):
            rep.append(self.relu)
            rep.append(SeparableConv2d(filters,filters,3,stride=1,padding=1,bias=False))
            rep.append(nn.BatchNorm2d(filters))
        if not grow_first:
            rep.append(self.relu)
            rep.append(SeparableConv2d(in_filters,out_filters,3,stride=1,padding=1,bias=False))
            rep.append(nn.BatchNorm2d(out_filters))
        if not start_with_relu:
            rep = rep[1:]
        else:
            rep[0] = nn.ReLU(inplace=False)
        if strides != 1:
            #rep.append(self.bam)
            rep.append(nn.MaxPool2d(3,strides,1))
        self.rep = nn.Sequential(*rep)
    def forward(self,inp):
        x = self.rep(inp)
        if self.skip is not None:
            skip = self.skip(inp)
            skip = self.skipbn(skip)
        else:
            skip = inp
        x+=skip
        return x
class Xception_LSTM(nn.Module):
"""
Xception optimized for the ImageNet dataset, as specified in
https://arxiv.org/pdf/1610.02357.pdf
"""
def __init__(self, num_classes=1000):
""" Constructor
Args:
num_classes: number of classes
"""
super(Xception_LSTM, self).__init__()
self.num_classes = num_classes
#self.conv1 = nn.Conv2d(15,32,3,2,0,bias=False)
self.conv1 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32,64,3,bias=False)
self.bn2 = nn.BatchNorm2d(64)
#do bam here
# self.bam1 = BAM(32)
# self.bam2 = BAM(64)
# self.bam3 = BAM(128)
#-------------------LSTM----------------------
self.rnn = nn.LSTM(input_size=2048, hidden_size=20, num_layers=2,bidirectional=True)#(input_size,hidden_size,num_layers)
self.input = torch.randn(5, 64, 10)#(seq_len, batch, input_size)#(32,2048,10,10)
self.h0 = torch.randn(4, 32, 20).cuda() #(num_layers,batch,output_size)
self.c0 = torch.randn(4, 32, 20).cuda() #(num_layers,batch,output_size)
#do relu here
self.block1=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block2=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block3=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block4=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block5=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block6=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block7=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block8=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block9=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block10=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block11=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block12=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv3 = SeparableConv2d(1024,1536,3,1,1)
self.bn3 = nn.BatchNorm2d(1536)
#do relu here
self.conv4 = SeparableConv2d(1536,2048,3,1,1)
self.bn4 = nn.BatchNorm2d(2048)
self.fc = nn.Linear(2048, num_classes)########last_linner second parameter is num_classes####################2048---->>>>>>>100
self.fc2 = nn.Linear(40, num_classes)
self.softmax = nn.Softmax()
# #------- init weights --------
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
# elif isinstance(m, nn.BatchNorm2d):
# m.weight.data.fill_(1)
# m.bias.data.zero_()
# #-----------------------------
def features(self, input):
x = self.conv1(input) #(32,3,299, 299)
x = self.bn1(x)
x = self.relu(x)
#######
#x=self.bam1(x)#(32,32,149,149)
#print("BAM is using")
#x=self.bam2(x)
#x=self.bam3(x)
#######
x = self.conv2(x) #(32,64, 147, 147)
x = self.bn2(x)#(32,64, 147, 147)
x = self.relu(x)#(32,64, 147, 147)
x = self.block1(x)#(32,128, 74, 74)
x = self.block2(x)#(32,256, 147, 147)
x = self.block3(x)
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.block8(x)
x = self.block9(x)
x = self.block10(x)
x = self.block11(x)
x = self.block12(x)  # -> (32,1024,10,10)
x = self.conv3(x)  # -> (32,1536,10,10)
x = self.bn3(x)
x = self.relu(x)  # (32,1536,10,10)
x = self.conv4(x)  # -> (32,2048,10,10)
x = self.bn4(x)  # (32,2048,10,10)
y = x
x = x.permute(2, 3, 0, 1).clone()  # (32,2048,10,10) -> (10,10,32,2048)
x = x.view(-1, 32, 2048).clone()  # (10,10,32,2048) -> (100,32,2048)
x, (hn, cn) = self.rnn(x, (self.h0, self.c0))  # (100,32,2048) -> (100,32,40)
x = x.permute(1, 2, 0).clone()  # -> (32,40,100)
x = x.view(32, 40, 10, 10).clone()  # -> (32,40,10,10)
return x  # (32,40,10,10)
def logits(self, features):#[32,2048,10,10]
x = self.relu(features)
# x=self.bam4(x)#[32,2]
# print("using bam in front of pool")
x = F.adaptive_avg_pool2d(x, (1, 1))  # -> (32,40,1,1)
x = x.view(x.size(0), -1)  # -> (32,40)
x = self.last_linear2(x)  # -> (32,num_classes)
x = self.softmax(x)
return x
def forward(self, input):
x = self.features(input)
x = self.logits(x)
return x
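# Hedged note (added): Xception_LSTM hard-codes its batch geometry. The LSTM
# states h0/c0 are (4, 32, 20) -- 2 layers * 2 directions, batch 32, hidden
# size 20 -- and features() reshapes through view(-1, 32, 2048) and
# view(32, 40, 10, 10), so the model only accepts batches of exactly 32 RGB
# 299x299 images on the GPU. Sketch (not in the original file):
#   model = xception_lstm(num_classes=2).cuda()
#   probs = model(torch.randn(32, 3, 299, 299).cuda())  # -> (32, 2)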
class Xception(nn.Module):
"""
Xception optimized for the ImageNet dataset, as specified in
https://arxiv.org/pdf/1610.02357.pdf
"""
def __init__(self, num_classes=1000):
""" Constructor
Args:
num_classes: number of classes
"""
super(Xception, self).__init__()
self.num_classes = num_classes
#self.conv1 = nn.Conv2d(15,32,3,2,0,bias=False)
self.conv1 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32,64,3,bias=False)
self.bn2 = nn.BatchNorm2d(64)
#do bam here
# self.bam1 = BAM(32)
# self.bam2 = BAM(64)
# self.bam3 = BAM(128)
#do relu here
self.block1=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block2=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block3=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block4=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block5=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block6=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block7=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block8=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block9=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block10=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block11=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block12=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv3 = SeparableConv2d(1024,1536,3,1,1)
self.bn3 = nn.BatchNorm2d(1536)
#do relu here
self.conv4 = SeparableConv2d(1536,2048,3,1,1)
self.bn4 = nn.BatchNorm2d(2048)
self.fc = nn.Linear(2048, num_classes)  # renamed to last_linear by the xception() factory; maps 2048 -> num_classes
self.softmax = nn.Softmax(dim=1)  # softmax over the class dimension
# #------- init weights --------
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
# elif isinstance(m, nn.BatchNorm2d):
# m.weight.data.fill_(1)
# m.bias.data.zero_()
# #-----------------------------
def features(self, input):
x = self.conv1(input) #(32, 299, 299)
x = self.bn1(x)
x = self.relu(x)
#######
#x=self.bam1(x)#(32,32,149,149)
#print("BAM is using")
#x=self.bam2(x)
#x=self.bam3(x)
#######
x = self.conv2(x) #(32,64, 147, 147)
x = self.bn2(x)#(32,64, 147, 147)
x = self.relu(x)#(32,64, 147, 147)
x = self.block1(x)  # -> (32,128,74,74)
x = self.block2(x)  # -> (32,256,37,37)
x = self.block3(x)
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.block8(x)
x = self.block9(x)
x = self.block10(x)
x = self.block11(x)
x = self.block12(x)  # -> (32,1024,10,10)
x = self.conv3(x)  # -> (32,1536,10,10)
x = self.bn3(x)
x = self.relu(x)  # (32,1536,10,10)
x = self.conv4(x)  # -> (32,2048,10,10)
x = self.bn4(x)  # (32,2048,10,10)
return x
def logits(self, features):#[32,2048,10,10]
x = self.relu(features)
# x=self.bam4(x)#[32,2]
# print("using bam in front of pool")
x = F.adaptive_avg_pool2d(x, (1, 1))  # [32,2048,10,10] -> [32,2048,1,1]
x = x.view(x.size(0), -1)  # [32,2048,1,1] -> [32,2048]
x = self.last_linear(x)  # [32,2048] -> [32,num_classes]
x = self.softmax(x)
return x  # [32,num_classes]
def forward(self, input):
x = self.features(input)
x = self.logits(x)
return x
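# Hedged usage sketch (added; assumes the xception() factory below has already
# renamed fc to last_linear, which logits() relies on):
#   model = xception(num_classes=1000, pretrained='imagenet')
#   probs = model(torch.randn(4, 3, 299, 299))  # -> (4, 1000) softmax scores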
class Xception_Twostream(nn.Module):
"""
Xception optimized for the ImageNet dataset, as specified in
https://arxiv.org/pdf/1610.02357.pdf
"""
def __init__(self, num_classes=1000):
""" Constructor
Args:
num_classes: number of classes
"""
super(Xception_Twostream, self).__init__()
self.num_classes = num_classes
#self.conv1 = nn.Conv2d(15,32,3,2,0,bias=False)
self.conv1 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32,64,3,bias=False)
self.bn2 = nn.BatchNorm2d(64)
#do bam here
self.bam1 = BAM(32)
self.bam12 = BAM(32)
self.bam2 = BAM(64)
self.bam22 = BAM(64)
#-------------------LSTM----------------------
self.rnn = nn.LSTM(input_size=2048, hidden_size=20, num_layers=2, bidirectional=True)  # (input_size, hidden_size, num_layers)
self.input = torch.randn(5, 24, 10)  # unused placeholder: (seq_len, batch, input_size)
self.h0 = torch.randn(4, 12, 20).cuda()  # (num_layers*num_directions, batch, hidden_size)
self.c0 = torch.randn(4, 12, 20).cuda()  # (num_layers*num_directions, batch, hidden_size)
self.fc2 = nn.Linear(40, num_classes)
#do relu here
self.block1=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block2=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block3=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block4=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block5=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block6=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block7=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block8=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block9=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block10=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block11=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block12=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv3 = SeparableConv2d(1024,1536,3,1,1)
self.bn3 = nn.BatchNorm2d(1536)
#do relu here
self.conv4 = SeparableConv2d(1536,2048,3,1,1)
self.bn4 = nn.BatchNorm2d(2048)
self.fc = nn.Linear(2048, num_classes)  # renamed to last_linear by the xception_twostream() factory; maps 2048 -> num_classes
self.softmax = nn.Softmax(dim=1)  # softmax over the class dimension
####################################two-stream##################################
self.conv12 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn12 = nn.BatchNorm2d(32)
self.conv22 = nn.Conv2d(32,64,3,bias=False)
self.bn22 = nn.BatchNorm2d(64)
#do relu here
self.block21=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block22=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block32=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block42=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block52=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block62=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block72=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block82=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block92=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block120=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block121=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block122=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv32 = SeparableConv2d(1024,1536,3,1,1)
self.bn32 = nn.BatchNorm2d(1536)
#do relu here
self.conv42 = SeparableConv2d(1536,2048,3,1,1)
self.bn42 = nn.BatchNorm2d(2048)
####################################two-stream##################################
def features(self, input1,input2):
x = self.conv1(input1) #(32, 299, 299)
#x=self.bam1(x)
x = self.bn1(x)
x = self.relu(x)
#######
x=self.bam1(x)#(32,32,149,149)
#print("BAM is using")
#x=self.bam2(x)
#x=self.bam3(x)
#######
x = self.conv2(x) #(32,64, 147, 147)
#x = self.bam2(x)
x = self.bn2(x)#(32,64, 147, 147)
x = self.relu(x)#(32,64, 147, 147)
#x = self.bam2(x)
x = self.block1(x)  # -> (B,128,74,74), 64->128 channels
x = self.block2(x)  # -> (B,256,37,37), 128->256 channels
x = self.block3(x)#256->728
x = self.block4(x)#728->728
x = self.block5(x)#728->728
x = self.block6(x)#728->728
x = self.block7(x)#728->728
x = self.block8(x)#728->728
x = self.block9(x)#728->728
x = self.block10(x)#728->728
x = self.block11(x)#728->728
x = self.block12(x)  # -> (B,1024,10,10), 728->1024 channels
x = self.conv3(x)  # -> (B,1536,10,10)
x = self.bn3(x)
x = self.relu(x)  # (B,1536,10,10)
#x = self.bam2(x)
x = self.conv4(x)  # -> (B,2048,10,10)
cx = x
x = self.bn4(x)
y = self.conv12(input2) #(32, 299, 299)
#y=self.bam12(y)#0.8204
y = self.bn12(y)
y = self.relu(y)
#____________
y=self.bam12(y)
#_____________
y = self.conv22(y) #(32,64, 147, 147)
#y = self.bam22(y)#acc 0.82 dis acc0.80
y = self.bn22(y)#(32,64, 147, 147)
y = self.relu(y)#(32,64, 147, 147)
#y = self.bam22(y)  # had no effect
y = self.block21(y)#(32,128, 74, 74)
y = self.block22(y)#(32,256, 147, 147)
y = self.block32(y)
y = self.block42(y)
y = self.block52(y)
y = self.block62(y)
y = self.block72(y)
y = self.block82(y)
y = self.block92(y)
y = self.block120(y)
y = self.block121(y)
y = self.block122(y) #(1024, 299, 299)
y = self.conv32(y) #(1536, 299, 299)
y = self.bn32(y)
y = self.relu(y)#(1536,10,10)
#y = self.bam22(y)
y = self.conv42(y)
cy=y
y = self.bn42(y)
y = y.permute(2, 3, 0, 1).clone()  # (B,2048,10,10) -> (10,10,B,2048)
y = y.view(-1, 12, 2048).clone()  # -> (100,12,2048); the 2nd dim is the batch size (hard-coded to 12)
y, (hn, cn) = self.rnn(y, (self.h0, self.c0))  # (100,12,2048) -> (100,12,40)
y = y.permute(1, 2, 0).clone()  # -> (12,40,100)
y = y.view(12, 40, 10, 10).clone()  # -> (12,40,10,10); the first dim is the batch size
return x,y,cx,cy
def logits(self, features1,features2):#[32,2048,10,10]
x = self.relu(features1)
y = self.relu(features2)
# x=self.bam4(x)#[32,2]
# print("using bam in front of pool")
# y = F.adaptive_avg_pool2d(y, (1, 1))
# y = y.view(y.size(0), -1)#1front
# y = self.last_linear(y)
# oy=y
# y = self.softmax(y)
#___________________
y = F.adaptive_avg_pool2d(y, (1, 1))  # -> (12,40,1,1)
y = y.view(y.size(0), -1)  # -> (12,40)
y = self.last_linear2(y)  # -> (12,num_classes)
oy = y
y = self.softmax(y)
#___________________
x = F.adaptive_avg_pool2d(x, (1, 1))
x = x.view(x.size(0), -1)
x = self.last_linear(x)
ox = x
x = self.softmax(x)
return x,ox,y,oy
def forward(self, input):
input1 = input[0]
input2 = input[1]
x,y,cx,cy = self.features(input1,input2)
x, ox, y, oy = self.logits(x, y)  # unpack in the same order logits() returns
return x,ox,y,oy
class Xception_Twostream_basic(nn.Module):
"""
Xception optimized for the ImageNet dataset, as specified in
https://arxiv.org/pdf/1610.02357.pdf
"""
def __init__(self, num_classes=1000):
""" Constructor
Args:
num_classes: number of classes
"""
super(Xception_Twostream_basic, self).__init__()
self.num_classes = num_classes
#self.conv1 = nn.Conv2d(15,32,3,2,0,bias=False)
self.conv1 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32,64,3,bias=False)
self.bn2 = nn.BatchNorm2d(64)
#do bam here
# self.bam1 = BAM(32)
# self.bam2 = BAM(64)
# self.bam3 = BAM(128)
#do relu here
self.block1=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block2=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block3=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block4=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block5=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block6=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block7=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block8=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block9=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block10=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block11=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block12=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv3 = SeparableConv2d(1024,1536,3,1,1)
self.bn3 = nn.BatchNorm2d(1536)
#do relu here
self.conv4 = SeparableConv2d(1536,2048,3,1,1)
self.bn4 = nn.BatchNorm2d(2048)
self.fc = nn.Linear(2048, num_classes)  # renamed to last_linear by the xception() factory; maps 2048 -> num_classes
self.softmax = nn.Softmax(dim=1)  # softmax over the class dimension
####################################two-stream##################################
self.conv12 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn12 = nn.BatchNorm2d(32)
self.conv22 = nn.Conv2d(32,64,3,bias=False)
self.bn22 = nn.BatchNorm2d(64)
#do relu here
self.block21=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block22=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block32=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block42=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block52=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block62=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block72=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block82=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block92=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block120=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block121=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block122=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv32 = SeparableConv2d(1024,1536,3,1,1)
self.bn32 = nn.BatchNorm2d(1536)
#do relu here
self.conv42 = SeparableConv2d(1536,2048,3,1,1)
self.bn42 = nn.BatchNorm2d(2048)
####################################two-stream##################################
def features(self, input1,input2):
x = self.conv1(input1) #(32, 299, 299)
x = self.bn1(x)
x = self.relu(x)
#######
#x=self.bam1(x)#(32,32,149,149)
#print("BAM is using")
#x=self.bam2(x)
#x=self.bam3(x)
#######
x = self.conv2(x) #(32,64, 147, 147)
x = self.bn2(x)#(32,64, 147, 147)
x = self.relu(x)#(32,64, 147, 147)
x = self.block1(x)  # -> (B,128,74,74), 64->128 channels
x = self.block2(x)  # -> (B,256,37,37), 128->256 channels
x = self.block3(x)#256->728
x = self.block4(x)#728->728
x = self.block5(x)#728->728
x = self.block6(x)#728->728
x = self.block7(x)#728->728
x = self.block8(x)#728->728
x = self.block9(x)#728->728
x = self.block10(x)#728->728
x = self.block11(x)#728->728
x = self.block12(x)  # -> (B,1024,10,10), 728->1024 channels
x = self.conv3(x)  # -> (B,1536,10,10)
x = self.bn3(x)
x = self.relu(x)  # (B,1536,10,10)
x = self.conv4(x)  # -> (B,2048,10,10)
cx = x
x = self.bn4(x)
y = self.conv12(input2) #(32, 299, 299)
y = self.bn12(y)
y = self.relu(y)
y = self.conv22(y) #(32,64, 147, 147)
y = self.bn22(y)#(32,64, 147, 147)
y = self.relu(y)#(32,64, 147, 147)
y = self.block21(y)  # -> (B,128,74,74)
y = self.block22(y)  # -> (B,256,37,37)
y = self.block32(y)
y = self.block42(y)
y = self.block52(y)
y = self.block62(y)
y = self.block72(y)
y = self.block82(y)
y = self.block92(y)
y = self.block120(y)
y = self.block121(y)
y = self.block122(y)  # -> (B,1024,10,10)
y = self.conv32(y)  # -> (B,1536,10,10)
y = self.bn32(y)
y = self.relu(y)  # (B,1536,10,10)
y = self.conv42(y)
cy = y
y = self.bn42(y)
return x,y,cx,cy
def logits(self, features1,features2):#[32,2048,10,10]
x = self.relu(features1)
y = self.relu(features2)
# x=self.bam4(x)#[32,2]
# print("using bam in front of pool")
y = F.adaptive_avg_pool2d(y, (1, 1))
y = y.view(y.size(0), -1)
y = self.last_linear(y)  # the basic variant shares last_linear across both streams
oy = y
y = self.softmax(y)
x = F.adaptive_avg_pool2d(x, (1, 1))
x = x.view(x.size(0), -1)
x = self.last_linear(x)
ox = x
x = self.softmax(x)
return x,ox,y,oy
def forward(self, input):
input1 = input[0]
input2 = input[1]
x,y,cx,cy = self.features(input1,input2)
x, ox, y, oy = self.logits(x, y)  # unpack in the same order logits() returns
return x,ox,y,oy
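# Hedged note (added): both two-stream variants return a 4-tuple
# (x_softmax, x_logits, y_softmax, y_logits) -- the softmaxed and raw
# pre-softmax outputs for each stream -- so callers can apply a loss to the
# logits while using the probabilities for prediction, e.g.:
#   x_prob, x_logit, y_prob, y_logit = model((img_stream1, img_stream2))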
class Xception_concat(nn.Module):
"""
Xception optimized for the ImageNet dataset, as specified in
https://arxiv.org/pdf/1610.02357.pdf
"""
def __init__(self, num_classes=1000):
""" Constructor
Args:
num_classes: number of classes
"""
super(Xception_concat, self).__init__()
self.num_classes = num_classes
#self.conv1 = nn.Conv2d(15,32,3,2,0,bias=False)
self.conv1 = nn.Conv2d(3,32,3,2,0,bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32,64,3,bias=False)
self.bn2 = nn.BatchNorm2d(64)
#do bam here
self.bam1 = BAM(32)
# self.bam2 = BAM(64)
# self.bam3 = BAM(128)
#do relu here
self.block1=Block(64,128,2,2,start_with_relu=False,grow_first=True)
self.block2=Block(128,256,2,2,start_with_relu=True,grow_first=True)
self.block3=Block(256,728,2,2,start_with_relu=True,grow_first=True)
self.block4=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block5=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block6=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block7=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block8=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block9=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block10=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block11=Block(728,728,3,1,start_with_relu=True,grow_first=True)
self.block12=Block(728,1024,2,2,start_with_relu=True,grow_first=False)
self.conv3 = SeparableConv2d(1024,1536,3,1,1)
self.bn3 = nn.BatchNorm2d(1536)
#do relu here
self.conv4 = SeparableConv2d(1536,2048,3,1,1)
self.bn4 = nn.BatchNorm2d(2048)
self.fc = nn.Linear(2048, num_classes)  # renamed to last_linear by the xception_concat() factory; maps 2048 -> num_classes
self.softmax = nn.Softmax(dim=1)  # softmax over the class dimension
# #------- init weights --------
# for m in self.modules():
# if isinstance(m, nn.Conv2d):
# n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
# m.weight.data.normal_(0, math.sqrt(2. / n))
# elif isinstance(m, nn.BatchNorm2d):
# m.weight.data.fill_(1)
# m.bias.data.zero_()
# #-----------------------------
def features(self, input):
x = self.conv1(input) #(32, 299, 299)
x = self.bn1(x)
x = self.relu(x)
#######
x=self.bam1(x)#(32,32,149,149)
#print("BAM is using")
#x=self.bam2(x)
#x=self.bam3(x)
#######
x = self.conv2(x) #(32,64, 147, 147)
x = self.bn2(x)#(32,64, 147, 147)
x = self.relu(x)#(32,64, 147, 147)
x = self.block1(x)  # -> (32,128,74,74)
x = self.block2(x)  # -> (32,256,37,37)
x = self.block3(x)
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.block8(x)
x = self.block9(x)
x = self.block10(x)
x = self.block11(x)
x = self.block12(x)  # -> (32,1024,10,10)
x = self.conv3(x)  # -> (32,1536,10,10)
x = self.bn3(x)
x = self.relu(x)  # (32,1536,10,10)
x = self.conv4(x)  # -> (32,2048,10,10)
x = self.bn4(x)
return x
def logits(self, features):#[32,2048,10,10]
x = self.relu(features)
# x=self.bam4(x)#[32,2]
# print("using bam in front of pool")
x = F.adaptive_avg_pool2d(x, (1, 1))
x = x.view(x.size(0), -1)
x = self.last_linear(x)
x = self.softmax(x)
return x
def forward(self, input):
x = self.features(input)
x = self.logits(x)
return x
def xception(num_classes=1000, pretrained='imagenet'):
model = Xception_Twostream_basic(num_classes=num_classes)
if pretrained:
settings = pretrained_settings['xception'][pretrained]
assert num_classes == settings['num_classes'], \
"num_classes should be {}, but is {}".format(settings['num_classes'], num_classes)
model = Xception(num_classes=num_classes)
model.load_state_dict(model_zoo.load_url(settings['url']))
model.input_space = settings['input_space']
model.input_size = settings['input_size']
model.input_range = settings['input_range']
model.mean = settings['mean']
model.std = settings['std']
# TODO: ugly
model.last_linear = model.fc
del model.fc
return model
def xception_concat(num_classes=1000):
model = Xception_concat(num_classes=num_classes)
# TODO: ugly
model.last_linear = model.fc
del model.fc
return model
def xception_lstm(num_classes=1000):
model = Xception_LSTM(num_classes=num_classes)
# TODO: ugly
model.last_linear = model.fc
model.last_linear2= model.fc2
del model.fc
del model.fc2
return model
def xception_twostream(num_classes=1000):
model = Xception_Twostream(num_classes=num_classes)
# TODO: ugly
model.last_linear = model.fc
model.last_linear2= model.fc2
del model.fc
del model.fc2
return model
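# Hedged factory overview (added): each constructor builds a model, then renames
# fc (and fc2 where present) to last_linear / last_linear2 because the forward
# paths reference those attribute names -- the "TODO: ugly" pattern above.
# Minimal sketch for the two-stream LSTM variant (batch size 12 is hard-coded
# by its h0/c0 states, and CUDA is required):
#   model = xception_twostream(num_classes=2).cuda()
#   outputs = model((torch.randn(12, 3, 299, 299).cuda(),
#                    torch.randn(12, 3, 299, 299).cuda()))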
| 32.728674
| 135
| 0.573465
| 4,820
| 31,845
| 3.671162
| 0.064108
| 0.046341
| 0.063182
| 0.074936
| 0.873298
| 0.862277
| 0.856061
| 0.848658
| 0.83792
| 0.832382
| 0
| 0.134643
| 0.259978
| 31,845
| 972
| 136
| 32.762346
| 0.616227
| 0.211744
| 0
| 0.802513
| 0
| 0
| 0.010632
| 0
| 0
| 0
| 0
| 0.001029
| 0.001795
| 1
| 0.050269
| false
| 0
| 0.012567
| 0
| 0.113106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ee35c6d28483230105031434eeba253a0c039e1
| 268
|
py
|
Python
|
njunmt/encoders/__init__.py
|
whr94621/NJUNMT-tf
|
29e0b0c577ea7c81acdc80e7a94a1c4dfb85c118
|
[
"Apache-2.0"
] | 111
|
2017-12-29T12:48:02.000Z
|
2022-03-15T00:47:13.000Z
|
njunmt/encoders/__init__.py
|
whr94621/NJUNMT-tf
|
29e0b0c577ea7c81acdc80e7a94a1c4dfb85c118
|
[
"Apache-2.0"
] | 3
|
2018-01-27T13:54:42.000Z
|
2020-03-02T03:07:19.000Z
|
njunmt/encoders/__init__.py
|
whr94621/NJUNMT-tf
|
29e0b0c577ea7c81acdc80e7a94a1c4dfb85c118
|
[
"Apache-2.0"
] | 44
|
2017-12-29T12:49:57.000Z
|
2022-02-02T13:25:28.000Z
|
from njunmt.encoders.rnn_encoder import StackBidirectionalRNNEncoder
from njunmt.encoders.rnn_encoder import UnidirectionalRNNEncoder
from njunmt.encoders.rnn_encoder import BiUnidirectionalRNNEncoder
from njunmt.encoders.transformer_encoder import TransformerEncoder
| 53.6
| 68
| 0.910448
| 28
| 268
| 8.571429
| 0.392857
| 0.166667
| 0.3
| 0.2625
| 0.425
| 0.425
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 268
| 4
| 69
| 67
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
42b91d5ac5129057eb9af30481c48346477f5704
| 2,547
|
py
|
Python
|
tests/beams/test_simply_supported.py
|
rozsasarpi/zandbak
|
ba1aac10f0d4611549e7eb2bcb3560e3d81321fc
|
[
"MIT"
] | null | null | null |
tests/beams/test_simply_supported.py
|
rozsasarpi/zandbak
|
ba1aac10f0d4611549e7eb2bcb3560e3d81321fc
|
[
"MIT"
] | null | null | null |
tests/beams/test_simply_supported.py
|
rozsasarpi/zandbak
|
ba1aac10f0d4611549e7eb2bcb3560e3d81321fc
|
[
"MIT"
] | null | null | null |
import numpy as np
from zandbak.beams.simply_supported import (
clamped_clamped_beam_under_point_force,
hinged_clamped_beam_under_point_force,
hinged_hinged_beam_under_point_force,
)
def test_hinged_hinged_beam_under_point_force():
span_length = 10
flexural_stiffness = 3
load_intensity = 2
load_position_from_left_end = span_length / 2
position_from_left_end = load_position_from_left_end
d_x_expected = 1 / 48 * load_intensity * span_length ** 3 / flexural_stiffness
m_x_expected = 1 / 4 * load_intensity * span_length
d_x, _, m_x = hinged_hinged_beam_under_point_force(
span_length=span_length,
flexural_stiffness=flexural_stiffness,
load_intensity=load_intensity,
load_position_from_left_end=load_position_from_left_end,
position_from_left_end=position_from_left_end,
)
np.testing.assert_almost_equal(d_x, d_x_expected)
np.testing.assert_almost_equal(m_x, m_x_expected)
def test_hinged_clamped_beam_under_point_force():
span_length = 10
flexural_stiffness = 3
load_intensity = 2
load_position_from_left_end = span_length / 2
position_from_left_end = load_position_from_left_end
d_x_expected = 7 / 768 * load_intensity * span_length ** 3 / flexural_stiffness
m_x_expected = 5 / 32 * load_intensity * span_length
d_x, _, m_x = hinged_clamped_beam_under_point_force(
span_length=span_length,
flexural_stiffness=flexural_stiffness,
load_intensity=load_intensity,
load_position_from_hinged_end=load_position_from_left_end,
position_from_left_end=position_from_left_end,
)
np.testing.assert_almost_equal(d_x, d_x_expected)
np.testing.assert_almost_equal(m_x, m_x_expected)
def test_clamped_clamped_beam_under_point_force():
span_length = 10
flexural_stiffness = 3
load_intensity = 2
load_position_from_left_end = span_length / 2
position_from_left_end = load_position_from_left_end
d_x_expected = 1 / 192 * load_intensity * span_length ** 3 / flexural_stiffness
m_x_expected = 1 / 8 * load_intensity * span_length
d_x, _, m_x = clamped_clamped_beam_under_point_force(
span_length=span_length,
flexural_stiffness=flexural_stiffness,
load_intensity=load_intensity,
load_position_from_left_end=load_position_from_left_end,
position_from_left_end=position_from_left_end,
)
np.testing.assert_almost_equal(d_x, d_x_expected)
np.testing.assert_almost_equal(m_x, m_x_expected)
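# Note (added): the expected values above are the classical closed-form results
# for a midspan point load P on a beam of span L and flexural stiffness EI:
#   hinged-hinged:   d = P*L**3/(48*EI),   M = P*L/4
#   hinged-clamped:  d = 7*P*L**3/(768*EI), M = 5*P*L/32
#   clamped-clamped: d = P*L**3/(192*EI),  M = P*L/8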
| 34.418919
| 83
| 0.763644
| 373
| 2,547
| 4.624665
| 0.123324
| 0.146087
| 0.185507
| 0.22029
| 0.954203
| 0.954203
| 0.898551
| 0.898551
| 0.86087
| 0.822609
| 0
| 0.01725
| 0.180605
| 2,547
| 73
| 84
| 34.890411
| 0.809296
| 0
| 0
| 0.614035
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.052632
| false
| 0
| 0.035088
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
95669c62f4ce77e44f4dc48565d2f5fad95b17ab
| 105
|
py
|
Python
|
allennlp-tutorial/venue/___init__.py
|
thomakl/BDSC
|
3c4f10126ff7dabe631839c47fc2362bf16e8588
|
[
"MIT"
] | null | null | null |
allennlp-tutorial/venue/___init__.py
|
thomakl/BDSC
|
3c4f10126ff7dabe631839c47fc2362bf16e8588
|
[
"MIT"
] | null | null | null |
allennlp-tutorial/venue/___init__.py
|
thomakl/BDSC
|
3c4f10126ff7dabe631839c47fc2362bf16e8588
|
[
"MIT"
] | null | null | null |
from venue.venue_reader import *
from venue.venue_classifier import *
from venue.venue_predictor import *
| 35
| 36
| 0.838095
| 15
| 105
| 5.666667
| 0.4
| 0.317647
| 0.494118
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104762
| 105
| 3
| 37
| 35
| 0.904255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
956c09538661491609ed102c6daa1aba5b9ff823
| 37,798
|
py
|
Python
|
models/rot_networks.py
|
lijx10/rot-equ-net
|
f2f81efc094a36ca9a511d5ce72af7591c6a207f
|
[
"MIT"
] | 26
|
2019-04-01T01:34:38.000Z
|
2021-06-01T10:21:38.000Z
|
models/rot_networks.py
|
lijx10/rot-equ-net
|
f2f81efc094a36ca9a511d5ce72af7591c6a207f
|
[
"MIT"
] | 1
|
2019-11-21T14:43:04.000Z
|
2019-11-23T06:04:17.000Z
|
models/rot_networks.py
|
lijx10/rot-equ-net
|
f2f81efc094a36ca9a511d5ce72af7591c6a207f
|
[
"MIT"
] | 7
|
2019-04-01T12:50:26.000Z
|
2022-02-09T12:55:07.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.modules.batchnorm import _BatchNorm
import numpy as np
import math
import torch.utils.model_zoo as model_zoo
import time
from util import som
from . import operations
from . import rotation_groups
from .layers import *
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import index_max
class RotEncoder(nn.Module):
def __init__(self, opt):
super(RotEncoder, self).__init__()
self.opt = opt
self.feature_num = opt.feature_num
# first PointNet
if self.opt.surface_normal == True:
self.first_pointnet = PointResNet(6, [64, 128, 256, 384], activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
else:
self.first_pointnet = PointResNet(3, [64, 128, 256, 384], activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
if self.opt.som_k >= 2:
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
# second PointNet
self.knnlayer = KNNModule(3 + 384*2, (512, 512), activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
# final PointNet
self.final_pointnet = PointNet(3 + 512*2, (768, self.feature_num), activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
else:
# second PointNet
self.knnlayer = KNNModule(3+384, (512, 512), activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
# final PointNet
self.final_pointnet = PointNet(3+512, (768, self.feature_num), activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
else:
# final PointNet
self.final_pointnet = PointResNet(3+384, (512, 512, 768, self.feature_num), activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
# build SOM for clustering; node initialization is done in __init__
rows = int(math.sqrt(self.opt.node_num))
cols = rows
self.som_builder = som.BatchSOM(rows, cols, 3, self.opt.gpu_ids[0], self.opt.batch_size)
# masked max
# self.masked_max = operations.MaskedMax(self.opt.node_num)
# padding
self.zero_pad = torch.nn.ZeroPad2d(padding=1)
# === rotation equivariant, configure the rotation matrix === begin ===
self.rotation_matrix_template = torch.zeros((1, self.opt.rot_equivariant_no, 3, 3), dtype=torch.float32) # 1xRx3x3
self.rotation_matrix_template[0, ...].copy_(rotation_groups.get_rotation_group_3x3(self.opt.rot_equivariant_mode, self.opt.rot_equivariant_no))
# === rotation equivariant, configure the rotation matrix === end ===
# debug for DataParallel
# self.pn = PointNet(3, (64, 128, 256, 512, self.feature_num), activation=self.opt.activation,
# normalization=self.opt.normalization,
# momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step,
# bn_momentum_decay=opt.bn_momentum_decay)
def forward(self, x, sn, node, node_knn_I, is_train=False, epoch=None):
'''
:param x: Bx3xN Tensor
:param sn: Bx3xN Tensor
:param node: Bx3xM FloatTensor
:param node_knn_I: BxMxk_som LongTensor
:param is_train: determine whether to add noise in KNNModule
:return:
'''
device = x.device
# optimize the SOM via x.data; the optimize function should not modify the input tensor
# self.som_builder.optimize(x.data)
# self.som_builder.node.resize_(node.size()).copy_(node)
# modify the x according to the nodes, minus the center
mask, mask_row_max, min_idx = som.query_topk(node, x.data, node.size()[2], k=self.opt.k) # BxkNxnode_num, Bxnode_num, BxkN
mask_row_sum = torch.sum(mask, dim=1) # Bxnode_num
mask = mask.unsqueeze(1) # Bx1xkNxnode_num
# if necessary, stack the x
x_list, sn_list = [], []
for i in range(self.opt.k):
x_list.append(x)
sn_list.append(sn)
x_stack = torch.cat(tuple(x_list), dim=2) # Bx3xkN
sn_stack = torch.cat(tuple(sn_list), dim=2) # Bx3xkN
# re-compute center, instead of using som.node
x_stack_data_unsqueeze = x_stack.data.unsqueeze(3) # BxCxkNx1
x_stack_data_masked = x_stack_data_unsqueeze * mask.float() # BxCxkNxnode_num
cluster_mean = torch.sum(x_stack_data_masked, dim=2) / (mask_row_sum.unsqueeze(1).float()+1e-5) # BxCxnode_num
som_node_cluster_mean = cluster_mean
# ====== rotate the pc, sn & som_node into R number of rotated versions ======
B, R, N, kN, M = x_stack.size()[0], \
self.opt.rot_equivariant_no, \
x.size()[2], x_stack.size()[2], \
node.size()[2]
rotation_matrix = self.rotation_matrix_template.to(device).expand(B, R, 3, 3).detach() # 1xRx3x3 -> BxRx3x3
x_stack_rot = torch.matmul(rotation_matrix, x_stack.unsqueeze(1).expand(B, R, 3, kN)) # BxRx3x3 * BxRx3xkN -> BxRx3xkN
sn_stack_rot = torch.matmul(rotation_matrix, sn_stack.unsqueeze(1).expand(B, R, 3, kN)) # BxRx3xkN
som_node_rot = torch.matmul(rotation_matrix, som_node_cluster_mean.unsqueeze(1).expand(B, R, 3, M)) # BxRx3xM
node_knn_I_rot = node_knn_I.unsqueeze(1).expand(B, R, M, self.opt.som_k).contiguous() # BxRxMxsom_k
mask_rot = mask.unsqueeze(1).expand(B, R, 1, kN, M).contiguous()
min_idx_rot = min_idx.unsqueeze(1).expand(B, R, kN).contiguous()
mask_row_max_rot = mask_row_max.unsqueeze(1).expand(B, R, M).contiguous()
# ====== rotate the pc, sn & som_node into R number of rotated versions ======
# assign each point with a center
# single rotation ------ begin ------
# node_expanded = som_node_cluster_mean.unsqueeze(2) # Bx3x1xM, som.node is Bx3xM
# centers = torch.sum(mask.float() * node_expanded, dim=3).detach() # BxCxkN
#
# x_decentered = (x_stack - centers).detach() # Bx3xkN
# x_augmented = torch.cat((x_decentered, sn_stack), dim=1) # Bx6xkN
# single rotation ------ end ------
# multiple rotations ------ begin ------
node_rot_expanded = som_node_rot.unsqueeze(3) # BxRx3x1xM, som_node_rot is BxRx3xM
# mask: Bx1xkNxM -> BxRx1xkNxM, self.centers_rot: BxRx3xkN
centers_rot = torch.sum(mask_rot.float() * node_rot_expanded, dim=4).detach() # BxRx3xkN
x_decentered_rot = (x_stack_rot - centers_rot).detach() # BxRx3xkN
x_augmented_rot = torch.cat((x_decentered_rot, sn_stack_rot), dim=2) # BxRx6xkN
# multiple rotations ------ end ------
# go through the first PointNet
if self.opt.surface_normal == True:
first_pn_out_rot = self.first_pointnet(
x_augmented_rot.contiguous().view(B*R, 6, kN).contiguous(),
epoch)
else:
first_pn_out_rot = self.first_pointnet(
x_decentered_rot.contiguous().view(B*R, 3, kN).contiguous(),  # 3 input channels when surface normals are disabled
epoch)
C = first_pn_out_rot.size()[1]
with torch.cuda.device(first_pn_out_rot.get_device()):
gather_index_rot = index_max.forward_cuda(first_pn_out_rot.detach(),
min_idx_rot.contiguous().view(B * R, kN).contiguous().int(), # BxRxkN-> kNxBxR->kN*BR->BR*kN
M).detach().long()
first_pn_out_masked_max_rot = first_pn_out_rot.gather(dim=2,
index=gather_index_rot * mask_row_max_rot.contiguous().view(B*R, M).contiguous().unsqueeze(1).long()) # BRxCxM
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
# first_pn_out_masked_max_rot: BRxCxM
first_pn_out_masked_max_rot_pool = first_pn_out_masked_max_rot.contiguous().view(B, R, C, M).contiguous() # BxRxCxM
first_pn_out_masked_max_rot_pool, _ = torch.max(first_pn_out_masked_max_rot_pool, dim=1, keepdim=True) # BxRxCxM -> Bx1xCxM
first_pn_out_masked_max_rot_pool = first_pn_out_masked_max_rot_pool.expand(B, R, C, M).contiguous() # Bx1xCxM -> BxRxCxM
first_pn_out_masked_max_rot_pool = first_pn_out_masked_max_rot_pool.contiguous().view(B*R,C,M).contiguous() # BRxCxM
first_pn_out_masked_max_rot = torch.cat((first_pn_out_masked_max_rot, first_pn_out_masked_max_rot_pool), dim=1)
if self.opt.som_k >= 2:
# second pointnet, knn search on SOM nodes: ----------------------------------
knn_center_1_rot, knn_feature_1_rot = self.knnlayer(som_node_rot.contiguous().view(B*R, 3, M).contiguous(),
first_pn_out_masked_max_rot,
node_knn_I_rot.contiguous().view(B*R, M, self.opt.som_k).contiguous(),
self.opt.som_k,
self.opt.som_k_type,
epoch)
C2 = knn_feature_1_rot.size()[1]
# final pointnet --------------------------------------------------------------
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
knn_feature_1_rot_pool = knn_feature_1_rot.contiguous().view(B, R, C2, M).contiguous() # B*RxC2xM -> BxRxC2xM
knn_feature_1_rot_pool, _ = torch.max(knn_feature_1_rot_pool, dim=1, keepdim=True) # Bx1xC2xM
knn_feature_1_rot_pool = knn_feature_1_rot_pool.expand(B, R, C2, M).contiguous() # Bx1xC2xM -> BxRxC2xM
knn_feature_1_rot_pool = knn_feature_1_rot_pool.contiguous().view(B*R, C2, M).contiguous()
knn_feature_1_rot = torch.cat((knn_feature_1_rot, knn_feature_1_rot_pool), dim=1)
final_pn_out_rot = self.final_pointnet(torch.cat((knn_center_1_rot, knn_feature_1_rot), dim=1), epoch) # Bx1024xM
else:
# final pointnet --------------------------------------------------------------
final_pn_out_rot = self.final_pointnet(torch.cat((som_node_rot.contiguous().view(B*R, 3, M).contiguous(),
first_pn_out_masked_max_rot),
dim=1),
epoch) # Bx1024xM
# final_pn_out_rot: BRx1024xM
final_pn_out_rot = final_pn_out_rot.contiguous().view(B, R, self.opt.feature_num, M).contiguous()
feature_rot, _ = torch.max(final_pn_out_rot, dim=3, keepdim=False) # BxRxC
feature, _ = torch.max(feature_rot, dim=1, keepdim=False)
# feature = torch.mean(feature_rot, dim=1, keepdim=False)
# # debug using vanilla pointnet
# pn_out = self.pn(x) # BxCxN
# feature, _ = torch.max(pn_out, dim=2, keepdim=False)
return feature
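# Hedged summary (added): RotEncoder achieves approximate rotation invariance by
# expanding every input into R rotated copies (rotation_matrix_template holds a
# discrete rotation group), running the shared SOM/PointNet hierarchy on the
# folded B*R batch, and max-pooling over the rotation axis at the end:
#   final_pn_out_rot: (B, R, C, M) --max over M--> (B, R, C) --max over R--> (B, C)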
class RotEncoder2D(nn.Module):
def __init__(self, opt):
super(RotEncoder2D, self).__init__()
self.opt = opt
self.feature_num = opt.feature_num
# first PointNet
if self.opt.intensity == True:
self.first_pointnet = PointResNet(3, [64, 64, 128, 128], activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
else:
self.first_pointnet = PointResNet(2, [64, 64, 128, 128], activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
if self.opt.som_k >= 2:
# second PointNet
self.knnlayer = KNNModule(2+128, (256, 256), activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
# final PointNet
self.final_pointnet = PointNet(2+256, (512, self.feature_num), activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
else:
# final PointNet
self.final_pointnet = PointResNet(2+128, (256, 256, 512, self.feature_num), activation=self.opt.activation, normalization=self.opt.normalization,
momentum=opt.bn_momentum, bn_momentum_decay_step=opt.bn_momentum_decay_step, bn_momentum_decay=opt.bn_momentum_decay)
# build SOM for clustering; node initialization is done in __init__
rows = int(math.sqrt(self.opt.node_num))
cols = rows
self.som_builder = som.BatchSOM(rows, cols, 2, self.opt.gpu_ids[0], self.opt.batch_size)
# masked max
# self.masked_max = operations.MaskedMax(self.opt.node_num)
# padding
self.zero_pad = torch.nn.ZeroPad2d(padding=1)
# === rotation equivariant, configure the rotation matrix === begin ===
self.rotation_matrix_template = torch.zeros((1, self.opt.rot_equivariant_no, 2, 2), dtype=torch.float32) # 1xRx2x2
self.rotation_matrix_template[0, ...].copy_(rotation_groups.get_rotation_group_2x2(self.opt.rot_equivariant_no))
# === rotation equivariant, configure the rotation matrix === end ===
def forward(self, x, intensity, node, node_knn_I, is_train=False, epoch=None):
'''
:param x: Bx2xN Tensor
:param intensity: Bx1xN Tensor
:param node: Bx2xM FloatTensor
:param node_knn_I: BxMxk_som LongTensor
:param is_train: determine whether to add noise in KNNModule
:return:
'''
device = x.device
# optimize the SOM via x.data; the optimize function should not modify the input tensor
# self.som_builder.optimize(x.data)
# self.som_builder.node.resize_(node.size()).copy_(node)
# modify the x according to the nodes, minus the center
mask, mask_row_max, min_idx = som.query_topk(node, x.data, node.size()[2], k=self.opt.k) # BxkNxnode_num, Bxnode_num, BxkN
mask_row_sum = torch.sum(mask, dim=1) # Bxnode_num
mask = mask.unsqueeze(1) # Bx1xkNxnode_num
# if necessary, stack the x
x_list, intensity_list = [], []
for i in range(self.opt.k):
x_list.append(x)
intensity_list.append(intensity)
x_stack = torch.cat(tuple(x_list), dim=2) # Bx2xkN
intensity_stack = torch.cat(tuple(intensity_list), dim=2) # Bx1xkN
# re-compute center, instead of using som.node
x_stack_data_unsqueeze = x_stack.data.unsqueeze(3) # BxCxkNx1
x_stack_data_masked = x_stack_data_unsqueeze * mask.float() # BxCxkNxnode_num
cluster_mean = torch.sum(x_stack_data_masked, dim=2) / (mask_row_sum.unsqueeze(1).float()+1e-5) # BxCxnode_num
som_node_cluster_mean = cluster_mean
# ====== rotate the pc, sn & som_node into R number of rotated versions ======
B, R, N, kN, M = x_stack.size()[0], \
self.opt.rot_equivariant_no, \
x.size()[2], x_stack.size()[2], \
node.size()[2]
rotation_matrix = self.rotation_matrix_template.to(device).expand(B, R, 2, 2).detach() # 1xRx2x2 -> BxRx2x2
x_stack_rot = torch.matmul(rotation_matrix, x_stack.unsqueeze(1).expand(B, R, 2, kN)) # BxRx2x2 * BxRx2xkN -> BxRx2xkN
som_node_rot = torch.matmul(rotation_matrix, som_node_cluster_mean.unsqueeze(1).expand(B, R, 2, M)) # BxRx2xM
intensity_stack_rot = intensity_stack.unsqueeze(1).expand(B, R, 1, kN) # BxRx1xkN
node_knn_I_rot = node_knn_I.unsqueeze(1).expand(B, R, M, self.opt.som_k).contiguous() # BxRxMxsom_k
mask_rot = mask.unsqueeze(1).expand(B, R, 1, kN, M).contiguous()
min_idx_rot = min_idx.unsqueeze(1).expand(B, R, kN).contiguous()
mask_row_max_rot = mask_row_max.unsqueeze(1).expand(B, R, M).contiguous()
# ====== rotate the pc, sn & som_node into R number of rotated versions ======
# assign each point with a center
# single rotation ------ begin ------
# node_expanded = som_node_cluster_mean.unsqueeze(2) # Bx3x1xM, som.node is Bx3xM
# centers = torch.sum(mask.float() * node_expanded, dim=3).detach() # BxCxkN
#
# x_decentered = (x_stack - centers).detach() # Bx3xkN
# x_augmented = torch.cat((x_decentered, sn_stack), dim=1) # Bx6xkN
# single rotation ------ end ------
# multiple rotations ------ begin ------
node_rot_expanded = som_node_rot.unsqueeze(3) # BxRx2x1xM, som_node_rot is BxRx2xM
# mask: Bx1xkNxM -> BxRx1xkNxM, self.centers_rot: BxRx2xkN
centers_rot = torch.sum(mask_rot.float() * node_rot_expanded, dim=4).detach() # BxRx2xkN
x_decentered_rot = (x_stack_rot - centers_rot).detach() # BxRx2xkN
x_augmented_rot = torch.cat((x_decentered_rot, intensity_stack_rot), dim=2) # BxRx3xkN
# multiple rotations ------ end ------
# go through the first PointNet
if self.opt.intensity == True:
first_pn_out_rot = self.first_pointnet(
x_augmented_rot.permute(2, 3, 0, 1).contiguous().view(3, kN, B * R).permute(2, 0, 1).contiguous(),
epoch)
else:
first_pn_out_rot = self.first_pointnet(
x_decentered_rot.permute(2, 3, 0, 1).contiguous().view(2, kN, B * R).permute(2, 0, 1).contiguous(),
epoch)
C = first_pn_out_rot.size()[1]
with torch.cuda.device(first_pn_out_rot.get_device()):
gather_index_rot = index_max.forward_cuda(first_pn_out_rot.detach(),
min_idx_rot.permute(2, 0, 1).contiguous().view(kN, B * R).permute(1, 0).contiguous().int(), # BxRxkN-> kNxBxR->kN*BR->BR*kN
M).detach().long()
first_pn_out_masked_max_rot = first_pn_out_rot.gather(dim=2,
index=gather_index_rot * mask_row_max_rot.permute(2,0,1).contiguous().view(M, B*R).permute(1,0).contiguous().unsqueeze(1).long()) # BRxCxM
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
# first_pn_out_masked_max_rot: BRxCxM
first_pn_out_masked_max_rot = first_pn_out_masked_max_rot.permute(1,2,0).contiguous().view(C, M, B, R).permute(2,3,0,1).contiguous() # BxRxCxM
first_pn_out_masked_max_rot, _ = torch.max(first_pn_out_masked_max_rot, dim=1, keepdim=True) # BxRxCxM -> Bx1xCxM
first_pn_out_masked_max_rot = first_pn_out_masked_max_rot.expand(B, R, C, M).contiguous() # Bx1xCxM -> BxRxCxM
first_pn_out_masked_max_rot = first_pn_out_masked_max_rot.permute(2,3,0,1).contiguous().view(C,M,B*R).permute(2,0,1).contiguous() # BRxCxM
if self.opt.som_k >= 2:
# second pointnet, knn search on SOM nodes: ----------------------------------
knn_center_1_rot, knn_feature_1_rot = self.knnlayer(som_node_rot.permute(2,3,0,1).contiguous().view(2, M, B*R).permute(2,0,1).contiguous(),
first_pn_out_masked_max_rot,
node_knn_I_rot.permute(2,3,0,1).contiguous().view(M, self.opt.som_k, B*R).permute(2,0,1).contiguous(),
self.opt.som_k,
self.opt.som_k_type,
epoch)
C2 = knn_feature_1_rot.size()[1]
# final pointnet --------------------------------------------------------------
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
knn_feature_1_rot = knn_feature_1_rot.permute(1,2,0).contiguous().view(C2, M, B, R).permute(2,3,0,1).contiguous() # B*RxC2xM -> BxRxC2xM
knn_feature_1_rot, _ = torch.max(knn_feature_1_rot, dim=1, keepdim=True) # Bx1xC2xM
knn_feature_1_rot = knn_feature_1_rot.expand(B, R, C2, M).contiguous() # Bx1xC2xM -> BxRxC2xM
knn_feature_1_rot = knn_feature_1_rot.permute(2,3,0,1).contiguous().view(C2, M, B*R).permute(2,0,1).contiguous()
final_pn_out_rot = self.final_pointnet(torch.cat((knn_center_1_rot, knn_feature_1_rot), dim=1), epoch) # Bx1024xM
else:
# final pointnet --------------------------------------------------------------
final_pn_out_rot = self.final_pointnet(torch.cat((som_node_rot.permute(2,3,0,1).contiguous().view(2, M, B*R).permute(2,0,1).contiguous(),
first_pn_out_masked_max_rot),
dim=1),
epoch) # Bx1024xM
# final_pn_out_rot: BRx1024xM
final_pn_out_rot = final_pn_out_rot.permute(1,2,0).contiguous().view(self.opt.feature_num, M, B, R).permute(2,3,0,1).contiguous()
feature_rot, _ = torch.max(final_pn_out_rot, dim=3, keepdim=False) # BxRxC
feature, _ = torch.max(feature_rot, dim=1, keepdim=False)
# # debug using vanilla pointnet
# pn_out = self.pn(x) # BxCxN
# feature, _ = torch.max(pn_out, dim=2, keepdim=False)
# get statistic of rotation max index
if feature_rot.size()[0] == 1:
_, max_idx = torch.max(feature_rot, dim=1, keepdim=False) # BxC
# print(max_idx)
histogram = np.histogram(max_idx.detach().cpu().numpy(), bins=np.asarray(list(range(0, feature_rot.size()[1]+1)))-0.5)
print(histogram)
return feature
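# Hedged note (added): the histogram branch above is a diagnostic that only runs
# for batch size 1 -- it records, per feature channel, which of the R rotations
# produced the channel's maximum, revealing how evenly the rotation group is used.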
class RotEncoderFusion(nn.Module):
def __init__(self, opt):
super(RotEncoderFusion, self).__init__()
self.opt = opt
self.feature_num = opt.feature_num
# first PointNet
self.C1 = 128
if self.opt.surface_normal == True:
self.first_pointnet = PointNet(6, [int(self.C1/2), int(self.C1/2), int(self.C1/2)],
activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
else:
self.first_pointnet = PointNet(3, [int(self.C1/2), int(self.C1/2), int(self.C1/2)],
activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
self.second_pointnet = PointNet(self.C1, [self.C1, self.C1],
activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
self.C2 = 512
if self.opt.som_k >= 2:
# second PointNet
self.knnlayer = KNNFusionModule(3 + self.C1,
(int(self.C2 / 2), int(self.C2 / 2), int(self.C2 / 2)),
(self.C2, self.C2),
activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
# final PointNet
self.final_pointnet = PointNetFusion(3+self.C2, (512, 512, 512), (self.feature_num, self.feature_num),
activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
else:
# final PointNet
self.final_pointnet = PointNetFusion(3+self.C1, (256, 512, 512), (self.feature_num, self.feature_num),
activation=self.opt.activation,
normalization=self.opt.normalization,
momentum=opt.bn_momentum,
bn_momentum_decay_step=opt.bn_momentum_decay_step,
bn_momentum_decay=opt.bn_momentum_decay)
# build SOM for clustering; node initialization is done in __init__
rows = int(math.sqrt(self.opt.node_num))
cols = rows
self.som_builder = som.BatchSOM(rows, cols, 3, self.opt.gpu_ids[0], self.opt.batch_size)
# masked max
# self.masked_max = operations.MaskedMax(self.opt.node_num)
# padding
self.zero_pad = torch.nn.ZeroPad2d(padding=1)
# === rotation equivariant, configure the rotation matrix === begin ===
self.rotation_matrix_template = torch.zeros((1, self.opt.rot_equivariant_no, 3, 3), dtype=torch.float32) # 1xRx3x3
self.rotation_matrix_template[0, ...].copy_(rotation_groups.get_rotation_group_3x3(self.opt.rot_equivariant_mode, self.opt.rot_equivariant_no))
# === rotation equivariant, configure the rotation matrix === end ===
def forward(self, x, sn, node, node_knn_I, is_train=False, epoch=None):
'''
:param x: Bx3xN Tensor
:param sn: Bx3xN Tensor
:param node: Bx3xM FloatTensor
:param node_knn_I: BxMxk_som LongTensor
:param is_train: determine whether to add noise in KNNModule
:return:
'''
device = x.device
# optimize the SOM via x.data; the optimize function should not modify the input tensor
# self.som_builder.optimize(x.data)
# self.som_builder.node.resize_(node.size()).copy_(node)
# modify the x according to the nodes, minus the center
mask, mask_row_max, min_idx = som.query_topk(node, x.data, node.size()[2], k=self.opt.k) # BxkNxnode_num, Bxnode_num, BxkN
mask_row_sum = torch.sum(mask, dim=1) # Bxnode_num
mask = mask.unsqueeze(1) # Bx1xkNxnode_num
# if necessary, stack the x
x_list, sn_list = [], []
for i in range(self.opt.k):
x_list.append(x)
sn_list.append(sn)
x_stack = torch.cat(tuple(x_list), dim=2) # Bx3xkN
sn_stack = torch.cat(tuple(sn_list), dim=2) # Bx3xkN
# re-compute center, instead of using som.node
x_stack_data_unsqueeze = x_stack.data.unsqueeze(3) # BxCxkNx1
x_stack_data_masked = x_stack_data_unsqueeze * mask.float() # BxCxkNxnode_num
cluster_mean = torch.sum(x_stack_data_masked, dim=2) / (mask_row_sum.unsqueeze(1).float()+1e-5) # BxCxnode_num
som_node_cluster_mean = cluster_mean
# ====== rotate the pc, sn & som_node into R number of rotated versions ======
B, R, N, kN, M = x_stack.size()[0], \
self.opt.rot_equivariant_no, \
x.size()[2], x_stack.size()[2], \
node.size()[2]
rotation_matrix = self.rotation_matrix_template.to(device).expand(B, R, 3, 3).detach() # 1xRx3x3 -> BxRx3x3
x_stack_rot = torch.matmul(rotation_matrix, x_stack.unsqueeze(1).expand(B, R, 3, kN)) # BxRx3x3 * BxRx3xkN -> BxRx3xkN
sn_stack_rot = torch.matmul(rotation_matrix, sn_stack.unsqueeze(1).expand(B, R, 3, kN)) # BxRx3xkN
som_node_rot = torch.matmul(rotation_matrix, som_node_cluster_mean.unsqueeze(1).expand(B, R, 3, M)) # BxRx3xM
node_knn_I_rot = node_knn_I.unsqueeze(1).expand(B, R, M, self.opt.som_k).contiguous() # BxRxMxsom_k
mask_rot = mask.unsqueeze(1).expand(B, R, 1, kN, M).contiguous()
min_idx_rot = min_idx.unsqueeze(1).expand(B, R, kN).contiguous()
mask_row_max_rot = mask_row_max.unsqueeze(1).expand(B, R, M).contiguous()
# ====== rotate the pc, sn & som_node into R number of rotated versions ======
# assign each point with a center
# single rotation ------ begin ------
# node_expanded = som_node_cluster_mean.unsqueeze(2) # Bx3x1xM, som.node is Bx3xM
# centers = torch.sum(mask.float() * node_expanded, dim=3).detach() # BxCxkN
#
# x_decentered = (x_stack - centers).detach() # Bx3xkN
# x_augmented = torch.cat((x_decentered, sn_stack), dim=1) # Bx6xkN
# single rotation ------ end ------
# multiple rotations ------ begin ------
node_rot_expanded = som_node_rot.unsqueeze(3) # BxRx3x1xM, som_node_rot is BxRx3xM
# mask: Bx1xkNxM -> BxRx1xkNxM, self.centers_rot: BxRx3xkN
centers_rot = torch.sum(mask_rot.float() * node_rot_expanded, dim=4).detach() # BxRx3xkN
x_decentered_rot = (x_stack_rot - centers_rot).detach() # BxRx3xkN
x_augmented_rot = torch.cat((x_decentered_rot, sn_stack_rot), dim=2) # BxRx6xkN
# multiple rotations ------ end ------
# go through the first PointNet
if self.opt.surface_normal == True:
first_pn_out_rot = self.first_pointnet(
x_augmented_rot.contiguous().view(B*R, 6, kN).contiguous(),
epoch)
else:
first_pn_out_rot = self.first_pointnet(
x_decentered_rot.contiguous().view(B*R, 3, kN).contiguous(),  # 3 input channels when surface normals are disabled
epoch)
C = first_pn_out_rot.size()[1]
# permute and reshape the min_idx, mask_rot, mask_row_max_rot
min_idx_rot = min_idx_rot.contiguous().view(B*R, kN).contiguous() # BxRxkN-> kNxBxR->kN*BR->BR*kN
mask_rot = mask_rot.contiguous().view(B*R, 1, kN, M).contiguous() # BxRx1xkNxM -> 1xkNxMxBxR -> 1xkNxMxBR -> BRx1xkNxM
mask_row_max_rot = mask_row_max_rot.contiguous().view(B*R, M).contiguous().unsqueeze(1).long()
# first_gather_index_rot = self.masked_max.compute(first_pn_out_rot,
# min_idx_rot,
# mask_rot).detach()
with torch.cuda.device(first_pn_out_rot.get_device()):
first_gather_index_rot = index_max.forward_cuda(first_pn_out_rot.detach(), min_idx_rot.int(), M).detach().long()
first_pn_out_masked_max_rot = first_pn_out_rot.gather(dim=2,
index=first_gather_index_rot * mask_row_max_rot) # BRxCxM
# scatter the masked_max back to the kN points
scattered_first_masked_max = torch.gather(first_pn_out_masked_max_rot,
dim=2,
index=min_idx_rot.unsqueeze(1).expand(B*R, first_pn_out_rot.size()[1], kN)) # BRxCxkN
first_pn_out_fusion = torch.cat((first_pn_out_rot, scattered_first_masked_max), dim=1) # BRx2CxkN
second_pn_out = self.second_pointnet(first_pn_out_fusion, epoch)
# second_gather_index_rot = self.masked_max.compute(second_pn_out,
# min_idx_rot,
# mask_rot).detach() # BRxCxM
with torch.cuda.device(second_pn_out.get_device()):
second_gather_index_rot = index_max.forward_cuda(second_pn_out.detach(), min_idx_rot.int(), M).detach().long()
second_pn_out_masked_max_rot = second_pn_out.gather(dim=2,
index=second_gather_index_rot * mask_row_max_rot) # BxCxM
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
# second_pn_out_masked_max_rot: BRxCxM
second_pn_out_masked_max_rot = second_pn_out_masked_max_rot.contiguous().view(B, R, C, M).contiguous() # BxRxCxM
second_pn_out_masked_max_rot, _ = torch.max(second_pn_out_masked_max_rot, dim=1, keepdim=True) # BxRxCxM -> Bx1xCxM
second_pn_out_masked_max_rot = second_pn_out_masked_max_rot.expand(B, R, C, M).contiguous() # Bx1xCxM -> BxRxCxM
second_pn_out_masked_max_rot = second_pn_out_masked_max_rot.contiguous().view(B*R,C,M,).contiguous() # BRxCxM
if self.opt.som_k >= 2:
# second pointnet, knn search on SOM nodes: ----------------------------------
knn_center_1_rot, knn_feature_1_rot = self.knnlayer(som_node_rot.contiguous().view(B*R, 3, M).contiguous(),
second_pn_out_masked_max_rot,
node_knn_I_rot.contiguous().view(B*R, M, self.opt.som_k).contiguous(),
self.opt.som_k,
self.opt.som_k_type,
epoch)
C2 = knn_feature_1_rot.size()[1]
# final pointnet --------------------------------------------------------------
if self.opt.rot_equivariant_pooling_mode == 'per-hierarchy':
knn_feature_1_rot = knn_feature_1_rot.contiguous().view(B, R, C2, M).contiguous() # B*RxC2xM -> BxRxC2xM
knn_feature_1_rot, _ = torch.max(knn_feature_1_rot, dim=1, keepdim=True) # Bx1xC2xM
knn_feature_1_rot = knn_feature_1_rot.expand(B, R, C2, M).contiguous() # Bx1xC2xM -> BxRxC2xM
knn_feature_1_rot = knn_feature_1_rot.contiguous().view(B*R, C2, M).contiguous()
final_pn_out_rot = self.final_pointnet(torch.cat((knn_center_1_rot, knn_feature_1_rot), dim=1), epoch) # Bx1024xM
else:
# final pointnet --------------------------------------------------------------
final_pn_out_rot = self.final_pointnet(torch.cat((som_node_rot.contiguous().view(B*R, 3, M).contiguous(),
second_pn_out_masked_max_rot),
dim=1),
epoch) # Bx1024xM
# final_pn_out_rot: BRx1024xM
final_pn_out_rot = final_pn_out_rot.contiguous().view(B, R, self.opt.feature_num, M).contiguous()
feature_rot, _ = torch.max(final_pn_out_rot, dim=3, keepdim=False) # BxRxC
feature, _ = torch.max(feature_rot, dim=1, keepdim=False)
# # debug using vanilla pointnet
# pn_out = self.pn(x) # BxCxN
# feature, _ = torch.max(pn_out, dim=2, keepdim=False)
return feature
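# Hedged summary (added): RotEncoderFusion differs from RotEncoder by a
# point-node fusion step -- the per-node masked max is scattered back to all kN
# points (torch.gather with min_idx) and concatenated with the per-point
# features before a second PointNet, so every point sees both local and
# cluster-level context:
#   first_pn_out_fusion = cat([per_point_feats, scattered_node_max], dim=1)  # BRx2CxkN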
| 59.71248
| 201
| 0.580507
| 4,739
| 37,798
| 4.332771
| 0.062249
| 0.036478
| 0.05552
| 0.027955
| 0.924073
| 0.916768
| 0.905664
| 0.876979
| 0.854624
| 0.834462
| 0
| 0.026821
| 0.30163
| 37,798
| 633
| 202
| 59.71248
| 0.751032
| 0.21689
| 0
| 0.707317
| 0
| 0
| 0.00312
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01626
| false
| 0
| 0.04065
| 0
| 0.073171
| 0.00271
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95997837b4169817b0a275947a99435fe7454cc9
| 26,297
|
py
|
Python
|
tests/market/vip_market_test.py
|
yerlandinata/Win-Win-Trade
|
2d9dd0c46731131239f46dd58ad8a6a95546f0b5
|
[
"MIT"
] | null | null | null |
tests/market/vip_market_test.py
|
yerlandinata/Win-Win-Trade
|
2d9dd0c46731131239f46dd58ad8a6a95546f0b5
|
[
"MIT"
] | 1
|
2021-06-01T21:50:44.000Z
|
2021-06-01T21:50:44.000Z
|
tests/market/vip_market_test.py
|
yerlandinata/Win-Win-Trade
|
2d9dd0c46731131239f46dd58ad8a6a95546f0b5
|
[
"MIT"
] | null | null | null |
from datetime import datetime
import requests
import pytest
from pytest_mock import mocker
from pandas import DataFrame
from src.valid_pairs import *
from src.market import BitcoinIndonesiaMarket
@pytest.fixture()
def market():
return BitcoinIndonesiaMarket()
@pytest.fixture()
def req_mock(mocker, monkeypatch):
mockery = mocker.Mock(return_value=mocker.Mock())
mockery.return_value.content = '{"s":"mock","t":[],"o":"mock","h":"mock","l":"mock","c":"mock","v":"mock"}'
monkeypatch.setattr(requests, 'get', mockery)
return mockery
def test_get_ohlc_with_before_url(market, req_mock):
symbol = BTCIDR
before = 2
after = 1
expected_query = {'symbol': BitcoinIndonesiaMarket.PAIRS[symbol], 'from': str(after), 'to': str(before), 'resolution':'1'}
market.get_ohlc(symbol, after, before=before)
req_mock.assert_called_once_with('https://vip.bitcoin.co.id/tradingview/history', params=expected_query)
def test_get_ohlc_without_before_url(market, req_mock):
symbol = BTCIDR
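# NOTE: assumes get_ohlc defaults its 'to' bound to the current timestamp;
# the assertion below can be flaky across a second boundary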
before = int(datetime.now().timestamp())
after = 1
expected_query = {'symbol': BitcoinIndonesiaMarket.PAIRS[symbol], 'from': str(after), 'to': str(before), 'resolution':'1'}
market.get_ohlc(symbol, after)
req_mock.assert_called_once_with('https://vip.bitcoin.co.id/tradingview/history', params=expected_query)
def test_get_ohlc_invalid_currency(market, req_mock):
symbol = 'USDIDR'
after = 1
with pytest.raises(RuntimeError) as excinfo:
market.get_ohlc(symbol, after)
assert 'Invalid currency pair: ' + symbol in str(excinfo.value)
def test_parse_ohlc_data(market):
json_str = '{\
"s":"ok",\
"t":[1514861100,1514862000,1514862900,1514863800,1514864700,1514865600,1514866500,1514867400,1514868300,1514869200],\
"c":[217998000,216999000,215845000,215999000,215337000,216541000,215985000,214964000,214850000,215105000],\
"o":[217084000,217998000,216999000,215845000,215999000,215337000,216541000,215985000,214964000,214850000],\
"h":[217998000,218400000,217000000,215999000,216392000,217500000,217397000,215985000,214980000,215184000],\
"l":[217006000,216900000,215000000,215251000,214017000,215000000,215100000,214151000,214202000,214849000],\
"v":[3.18698879,6.82663246,15.68406076,2.15531776,12.87793266,7.0219555,4.56684082,7.37273228,8.14764141,3.09377636]\
}'
price = market.parse_ohlc_data(json_str)
assert list(price.index) == [1514861100, 1514862000, 1514862900, 1514863800, 1514864700, 1514865600, 1514866500, 1514867400, 1514868300, 1514869200]
assert list(price.open) == [217084000, 217998000, 216999000, 215845000, 215999000, 215337000, 216541000, 215985000, 214964000, 214850000]
assert list(price.high) == [217998000, 218400000, 217000000, 215999000, 216392000, 217500000, 217397000, 215985000, 214980000, 215184000]
assert list(price.low) == [217006000, 216900000, 215000000, 215251000, 214017000, 215000000, 215100000, 214151000, 214202000, 214849000]
assert list(price.close) == [217998000, 216999000, 215845000, 215999000, 215337000, 216541000, 215985000, 214964000, 214850000, 215105000]
assert list(price.volume) == [3.18698879, 6.82663246, 15.68406076, 2.15531776, 12.87793266, 7.0219555, 4.56684082, 7.37273228, 8.14764141, 3.09377636]
def test_get_buy_order_book(market, req_mock):
req_mock.return_value.content = '{"buy":[[18853000,"0.15371150"],[18851000,"0.03984096"],[18850000,"0.17848291"],[18802000,"0.01250196"],[18801000,"0.05318866"],[18800000,"3.25905595"],[18778000,"0.01966609"],[18724000,"4.87102424"],[18718000,"0.39542424"],[18702000,"0.00868762"],[18700000,"1.64035101"],[18698000,"0.08022248"],[18684000,"0.02676086"],[18683000,"0.02450516"],[18682000,"15.80000000"],[18681000,"0.04655425"],[18650000,"0.02680965"],[18649000,"0.33554775"],[18611000,"0.78619719"],[18610000,"2.04191295"],[18608000,"0.82924677"],[18606000,"157.35374610"],[18600000,"0.37172897"],[18599000,"0.18216194"],[18575000,"0.05383580"],[18562000,"0.01258409"],[18560000,"0.02693965"],[18558000,"9.24539298"],[18551000,"0.01078108"],[18550000,"3.54418382"],[18537000,"0.14101526"],[18527000,"0.16518189"],[18510000,"0.02160994"],[18502000,"0.31682688"],[18501000,"0.06486135"],[18500000,"5.73903562"],[18499000,"0.27937320"],[18478000,"0.11969823"],[18471000,"0.25544762"],[18449000,"0.38764035"],[18410000,"0.01086366"],[18405000,"0.01901657"],[18404000,"0.06520321"],[18403000,"0.05433896"],[18402000,"0.45245734"],[18400000,"3.02810396"],[18361000,"0.11488546"],[18338000,"0.02726578"],[18333000,"0.18639502"],[18326000,"0.02907895"],[18324000,"0.00545732"],[18323000,"0.02724924"],[18321000,"0.05458217"],[18310000,"0.05461496"],[18307000,"0.02463112"],[18305000,"12.71608877"],[18300000,"2.00142420"],[18299000,"0.01397830"],[18295000,"0.01309702"],[18280000,"2.83853183"],[18268000,"0.06654209"],[18251000,"0.44994279"],[18250000,"0.69844553"],[18248000,"0.44089894"],[18230000,"0.02558321"],[18216000,"0.00586012"],[18212000,"0.06973424"],[18202000,"0.01648170"],[18200000,"3.31165521"],[18199000,"0.16248788"],[18188000,"1.12187645"],[18170000,"0.27517886"],[18160000,"66.07929515"],[18159000,"0.08130089"],[18152000,"0.39441455"],[18151000,"0.13264007"],[18150000,"0.56239862"],[18147000,"0.01102110"],[18144000,"0.09638673"],[18108000,"0.02801452"],[18101000,"0.55245566"],[18100000,"1.18629403"],[18095000,"0.05526388"],[18088000,"1.38213180"],[18075000,"0.03205311"],[18062000,"0.05536485"],[18048000,"0.49531643"],[18018000,"0.01000000"],[18012000,"0.05551854"],[18010000,"0.11104941"],[18008000,"0.01237327"],[18005000,"0.03080088"],[18003000,"0.00555462"],[18001000,"0.19443364"],[18000000,"26.15183333"],[17999000,"0.27854141"],[17960000,"0.10440089"],[17959000,"0.05568238"],[17953000,"0.71025700"],[17950000,"0.59067582"],[17913000,"0.11165075"],[17909000,"0.03147311"],[17900000,"0.06424581"],[17850000,"0.07226890"],[17828000,"0.05784440"],[17820000,"0.01122334"],[17813000,"0.11227755"],[17807000,"0.16847307"],[17800000,"0.10005129"],[17799000,"0.05618293"],[17798000,"0.56186088"],[17774000,"0.20000000"],[17754000,"0.05632533"],[17750000,"1.70834461"],[17740000,"0.02113866"],[17719000,"0.11887995"],[17717000,"0.16932889"],[17713000,"0.11291142"],[17705000,"0.05648121"],[17700000,"0.17302259"],[17699000,"0.05102661"],[17657000,"0.02831738"],[17654000,"0.02135719"],[17650000,"3.27803824"],[17613000,"0.11355248"],[17600000,"0.28409090"],[17599000,"0.00284107"],[17570000,"0.03414968"],[17555000,"0.01139276"],[17553000,"0.01709109"],[17532000,"0.02283350"],[17513000,"0.11420087"],[17510000,"0.02999046"],[17505000,"0.05902890"],[17501000,"10.33972670"],[17500000,"3.87785737"],[17450000,"0.05503650"],[17424000,"0.01147842"],[17413000,"0.11485671"],[17400000,"0.01484419"],[17350000,"0.01440922"],[17313000,"0.11552012"],[17300000,"0.05780346"],[17299000,"0.13006532"],[17253000,"0.01738828"],[17250000,"0.00891426"],[17
213000,"0.11619125"],[17200000,"0.12790697"],[17166000,"0.23301875"],[17121000,"0.11681560"]],"sell":[[18898000,"0.01157459"],[18899000,"4.50098492"],[18900000,"0.31818765"],[18906000,"0.66113731"],[18907000,"0.13154571"],[18979000,"15.70000000"],[19000000,"1.25445008"],[19100000,"0.05049293"],[19155000,"9.03914128"],[19173000,"157.20000000"],[19205000,"0.36279273"],[19210000,"0.03051297"],[19270000,"0.53591008"],[19290000,"0.05129825"],[19294000,"0.02879900"],[19296000,"0.04706534"],[19297000,"0.48479566"],[19298000,"0.10404844"],[19300000,"2.16518405"],[19336000,"0.41769567"],[19398000,"0.05131953"],[19400000,"0.54344875"],[19416000,"0.39259558"],[19431000,"0.02132037"],[19456000,"0.07976072"],[19495000,"64.50000000"],[19500000,"2.68438805"],[19502000,"0.32902415"],[19504000,"0.08237701"],[19516000,"0.10057434"],[19523000,"0.01265415"],[19540000,"0.18574165"],[19580000,"0.05003433"],[19600000,"2.32908989"],[19640000,"0.01808019"],[19646000,"0.10000000"],[19667000,"0.05010768"],[19700000,"0.05213042"],[19715000,"0.28119708"],[19743000,"0.13787009"],[19790000,"0.05263157"],[19796000,"0.10048494"],[19800000,"0.11746224"],[19848000,"0.51695616"],[19849000,"0.01490823"],[19850000,"1.83613216"],[19851000,"0.89799799"],[19872000,"0.11000000"],[19876000,"0.01173500"],[19879000,"0.33553227"],[19880000,"2.59918090"],[19890000,"0.47602461"],[19897000,"2.55000000"],[19899000,"0.18427635"],[19900000,"2.18225182"],[19901000,"0.01303044"],[19902000,"0.26253751"],[19903000,"0.03184070"],[19905000,"0.06719900"],[19924000,"0.20450392"],[19945000,"0.38671872"],[19947000,"0.09097550"],[19950000,"0.02141400"],[19951000,"0.05405405"],[19958000,"0.06900412"],[19977000,"0.01317008"],[19992000,"0.78766653"],[19997000,"0.02720477"],[19998000,"0.04376623"],[19999000,"2.22509224"],[20000000,"12.01979485"],[20001000,"0.01442670"],[20010000,"0.02736751"],[20011000,"0.57589323"],[20019000,"0.05225679"],[20037000,"0.01097935"],[20049000,"2.56210000"],[20050000,"0.94853866"],[20058000,"0.01180235"],[20089000,"0.82074578"],[20094000,"0.01000696"],[20099000,"0.20000000"],[20100000,"5.09448347"],[20105000,"0.05007635"],[20108000,"0.04813559"],[20111000,"0.14382494"],[20125000,"0.04755543"],[20130000,"0.01296173"],[20150000,"0.14431676"],[20156000,"0.25005255"],[20168000,"0.09548481"],[20180000,"2.99000000"],[20185000,"0.23745086"],[20196000,"0.01249063"],[20199000,"0.20000000"],[20200000,"0.75835528"],[20201000,"6.16664220"],[20203000,"0.02283532"],[20210000,"0.65194300"],[20250000,"1.73127444"],[20254000,"0.03101615"],[20261000,"0.05395605"],[20270000,"0.01140432"],[20286000,"0.02639481"],[20289000,"0.26762727"],[20299000,"0.22655479"],[20300000,"3.23526146"],[20301000,"0.09217087"],[20337000,"0.58917714"],[20341000,"1.28106274"],[20345000,"0.05003210"],[20350000,"1.01002023"],[20360000,"0.09977549"],[20371000,"0.04930517"],[20388000,"0.50701193"],[20400000,"1.93489369"],[20406000,"0.07022476"],[20412000,"0.05622558"],[20414000,"0.25018354"],[20418000,"0.02198056"],[20438000,"1.57353438"],[20441000,"1.00000000"],[20450000,"0.46705391"],[20453000,"0.05003733"],[20473000,"0.01074575"],[20492000,"0.10000000"],[20495000,"0.02278079"],[20499000,"3.04795342"],[20500000,"32.32824668"],[20502000,"0.19839519"],[20508000,"0.44083337"],[20511000,"0.15000017"],[20512000,"0.05025015"],[20513000,"0.02498376"],[20515000,"0.01668513"],[20532000,"0.19201700"],[20540000,"0.22727960"],[20541000,"1.00000000"],[20550000,"0.44934491"],[20565000,"2.65300000"],[20568000,"5.00000000"],[20588000,"0.02578898"],[20599000,"0.30000000"],[20600000,"10.4
3480286"],[20606000,"0.06630111"],[20615000,"0.02321350"],[20641000,"0.97898908"],[20648000,"0.01358460"],[20650000,"1.78839367"],[20666000,"0.11387052"]]}'
depth = market.get_buy_order_book(ETHIDR)
# sampling
assert depth.buy[18853000] == 0.15371150
assert depth.buy[18681000] == 0.04655425
req_mock.assert_called_once_with('https://vip.bitcoin.co.id/api/eth_idr/depth')
def test_get_sell_order_book(market, req_mock):
req_mock.return_value.content = '{"buy":[[18853000,"0.15371150"],[18851000,"0.03984096"],[18850000,"0.17848291"],[18802000,"0.01250196"],[18801000,"0.05318866"],[18800000,"3.25905595"],[18778000,"0.01966609"],[18724000,"4.87102424"],[18718000,"0.39542424"],[18702000,"0.00868762"],[18700000,"1.64035101"],[18698000,"0.08022248"],[18684000,"0.02676086"],[18683000,"0.02450516"],[18682000,"15.80000000"],[18681000,"0.04655425"],[18650000,"0.02680965"],[18649000,"0.33554775"],[18611000,"0.78619719"],[18610000,"2.04191295"],[18608000,"0.82924677"],[18606000,"157.35374610"],[18600000,"0.37172897"],[18599000,"0.18216194"],[18575000,"0.05383580"],[18562000,"0.01258409"],[18560000,"0.02693965"],[18558000,"9.24539298"],[18551000,"0.01078108"],[18550000,"3.54418382"],[18537000,"0.14101526"],[18527000,"0.16518189"],[18510000,"0.02160994"],[18502000,"0.31682688"],[18501000,"0.06486135"],[18500000,"5.73903562"],[18499000,"0.27937320"],[18478000,"0.11969823"],[18471000,"0.25544762"],[18449000,"0.38764035"],[18410000,"0.01086366"],[18405000,"0.01901657"],[18404000,"0.06520321"],[18403000,"0.05433896"],[18402000,"0.45245734"],[18400000,"3.02810396"],[18361000,"0.11488546"],[18338000,"0.02726578"],[18333000,"0.18639502"],[18326000,"0.02907895"],[18324000,"0.00545732"],[18323000,"0.02724924"],[18321000,"0.05458217"],[18310000,"0.05461496"],[18307000,"0.02463112"],[18305000,"12.71608877"],[18300000,"2.00142420"],[18299000,"0.01397830"],[18295000,"0.01309702"],[18280000,"2.83853183"],[18268000,"0.06654209"],[18251000,"0.44994279"],[18250000,"0.69844553"],[18248000,"0.44089894"],[18230000,"0.02558321"],[18216000,"0.00586012"],[18212000,"0.06973424"],[18202000,"0.01648170"],[18200000,"3.31165521"],[18199000,"0.16248788"],[18188000,"1.12187645"],[18170000,"0.27517886"],[18160000,"66.07929515"],[18159000,"0.08130089"],[18152000,"0.39441455"],[18151000,"0.13264007"],[18150000,"0.56239862"],[18147000,"0.01102110"],[18144000,"0.09638673"],[18108000,"0.02801452"],[18101000,"0.55245566"],[18100000,"1.18629403"],[18095000,"0.05526388"],[18088000,"1.38213180"],[18075000,"0.03205311"],[18062000,"0.05536485"],[18048000,"0.49531643"],[18018000,"0.01000000"],[18012000,"0.05551854"],[18010000,"0.11104941"],[18008000,"0.01237327"],[18005000,"0.03080088"],[18003000,"0.00555462"],[18001000,"0.19443364"],[18000000,"26.15183333"],[17999000,"0.27854141"],[17960000,"0.10440089"],[17959000,"0.05568238"],[17953000,"0.71025700"],[17950000,"0.59067582"],[17913000,"0.11165075"],[17909000,"0.03147311"],[17900000,"0.06424581"],[17850000,"0.07226890"],[17828000,"0.05784440"],[17820000,"0.01122334"],[17813000,"0.11227755"],[17807000,"0.16847307"],[17800000,"0.10005129"],[17799000,"0.05618293"],[17798000,"0.56186088"],[17774000,"0.20000000"],[17754000,"0.05632533"],[17750000,"1.70834461"],[17740000,"0.02113866"],[17719000,"0.11887995"],[17717000,"0.16932889"],[17713000,"0.11291142"],[17705000,"0.05648121"],[17700000,"0.17302259"],[17699000,"0.05102661"],[17657000,"0.02831738"],[17654000,"0.02135719"],[17650000,"3.27803824"],[17613000,"0.11355248"],[17600000,"0.28409090"],[17599000,"0.00284107"],[17570000,"0.03414968"],[17555000,"0.01139276"],[17553000,"0.01709109"],[17532000,"0.02283350"],[17513000,"0.11420087"],[17510000,"0.02999046"],[17505000,"0.05902890"],[17501000,"10.33972670"],[17500000,"3.87785737"],[17450000,"0.05503650"],[17424000,"0.01147842"],[17413000,"0.11485671"],[17400000,"0.01484419"],[17350000,"0.01440922"],[17313000,"0.11552012"],[17300000,"0.05780346"],[17299000,"0.13006532"],[17253000,"0.01738828"],[17250000,"0.00891426"],[17
213000,"0.11619125"],[17200000,"0.12790697"],[17166000,"0.23301875"],[17121000,"0.11681560"]],"sell":[[18898000,"0.01157459"],[18899000,"4.50098492"],[18900000,"0.31818765"],[18906000,"0.66113731"],[18907000,"0.13154571"],[18979000,"15.70000000"],[19000000,"1.25445008"],[19100000,"0.05049293"],[19155000,"9.03914128"],[19173000,"157.20000000"],[19205000,"0.36279273"],[19210000,"0.03051297"],[19270000,"0.53591008"],[19290000,"0.05129825"],[19294000,"0.02879900"],[19296000,"0.04706534"],[19297000,"0.48479566"],[19298000,"0.10404844"],[19300000,"2.16518405"],[19336000,"0.41769567"],[19398000,"0.05131953"],[19400000,"0.54344875"],[19416000,"0.39259558"],[19431000,"0.02132037"],[19456000,"0.07976072"],[19495000,"64.50000000"],[19500000,"2.68438805"],[19502000,"0.32902415"],[19504000,"0.08237701"],[19516000,"0.10057434"],[19523000,"0.01265415"],[19540000,"0.18574165"],[19580000,"0.05003433"],[19600000,"2.32908989"],[19640000,"0.01808019"],[19646000,"0.10000000"],[19667000,"0.05010768"],[19700000,"0.05213042"],[19715000,"0.28119708"],[19743000,"0.13787009"],[19790000,"0.05263157"],[19796000,"0.10048494"],[19800000,"0.11746224"],[19848000,"0.51695616"],[19849000,"0.01490823"],[19850000,"1.83613216"],[19851000,"0.89799799"],[19872000,"0.11000000"],[19876000,"0.01173500"],[19879000,"0.33553227"],[19880000,"2.59918090"],[19890000,"0.47602461"],[19897000,"2.55000000"],[19899000,"0.18427635"],[19900000,"2.18225182"],[19901000,"0.01303044"],[19902000,"0.26253751"],[19903000,"0.03184070"],[19905000,"0.06719900"],[19924000,"0.20450392"],[19945000,"0.38671872"],[19947000,"0.09097550"],[19950000,"0.02141400"],[19951000,"0.05405405"],[19958000,"0.06900412"],[19977000,"0.01317008"],[19992000,"0.78766653"],[19997000,"0.02720477"],[19998000,"0.04376623"],[19999000,"2.22509224"],[20000000,"12.01979485"],[20001000,"0.01442670"],[20010000,"0.02736751"],[20011000,"0.57589323"],[20019000,"0.05225679"],[20037000,"0.01097935"],[20049000,"2.56210000"],[20050000,"0.94853866"],[20058000,"0.01180235"],[20089000,"0.82074578"],[20094000,"0.01000696"],[20099000,"0.20000000"],[20100000,"5.09448347"],[20105000,"0.05007635"],[20108000,"0.04813559"],[20111000,"0.14382494"],[20125000,"0.04755543"],[20130000,"0.01296173"],[20150000,"0.14431676"],[20156000,"0.25005255"],[20168000,"0.09548481"],[20180000,"2.99000000"],[20185000,"0.23745086"],[20196000,"0.01249063"],[20199000,"0.20000000"],[20200000,"0.75835528"],[20201000,"6.16664220"],[20203000,"0.02283532"],[20210000,"0.65194300"],[20250000,"1.73127444"],[20254000,"0.03101615"],[20261000,"0.05395605"],[20270000,"0.01140432"],[20286000,"0.02639481"],[20289000,"0.26762727"],[20299000,"0.22655479"],[20300000,"3.23526146"],[20301000,"0.09217087"],[20337000,"0.58917714"],[20341000,"1.28106274"],[20345000,"0.05003210"],[20350000,"1.01002023"],[20360000,"0.09977549"],[20371000,"0.04930517"],[20388000,"0.50701193"],[20400000,"1.93489369"],[20406000,"0.07022476"],[20412000,"0.05622558"],[20414000,"0.25018354"],[20418000,"0.02198056"],[20438000,"1.57353438"],[20441000,"1.00000000"],[20450000,"0.46705391"],[20453000,"0.05003733"],[20473000,"0.01074575"],[20492000,"0.10000000"],[20495000,"0.02278079"],[20499000,"3.04795342"],[20500000,"32.32824668"],[20502000,"0.19839519"],[20508000,"0.44083337"],[20511000,"0.15000017"],[20512000,"0.05025015"],[20513000,"0.02498376"],[20515000,"0.01668513"],[20532000,"0.19201700"],[20540000,"0.22727960"],[20541000,"1.00000000"],[20550000,"0.44934491"],[20565000,"2.65300000"],[20568000,"5.00000000"],[20588000,"0.02578898"],[20599000,"0.30000000"],[20600000,"10.4
3480286"],[20606000,"0.06630111"],[20615000,"0.02321350"],[20641000,"0.97898908"],[20648000,"0.01358460"],[20650000,"1.78839367"],[20666000,"0.11387052"]]}'
depth = market.get_sell_order_book(XLMIDR)
# sampling
assert depth.sell[20512000] == 0.05025015
assert depth.sell[19523000] == 0.01265415
req_mock.assert_called_once_with('https://vip.bitcoin.co.id/api/str_idr/depth')
def test_get_best_price(market, req_mock):
req_mock.return_value.content = '{"buy":[[18853000,"0.15371150"],[18851000,"0.03984096"],[18850000,"0.17848291"],[18802000,"0.01250196"],[18801000,"0.05318866"],[18800000,"3.25905595"],[18778000,"0.01966609"],[18724000,"4.87102424"],[18718000,"0.39542424"],[18702000,"0.00868762"],[18700000,"1.64035101"],[18698000,"0.08022248"],[18684000,"0.02676086"],[18683000,"0.02450516"],[18682000,"15.80000000"],[18681000,"0.04655425"],[18650000,"0.02680965"],[18649000,"0.33554775"],[18611000,"0.78619719"],[18610000,"2.04191295"],[18608000,"0.82924677"],[18606000,"157.35374610"],[18600000,"0.37172897"],[18599000,"0.18216194"],[18575000,"0.05383580"],[18562000,"0.01258409"],[18560000,"0.02693965"],[18558000,"9.24539298"],[18551000,"0.01078108"],[18550000,"3.54418382"],[18537000,"0.14101526"],[18527000,"0.16518189"],[18510000,"0.02160994"],[18502000,"0.31682688"],[18501000,"0.06486135"],[18500000,"5.73903562"],[18499000,"0.27937320"],[18478000,"0.11969823"],[18471000,"0.25544762"],[18449000,"0.38764035"],[18410000,"0.01086366"],[18405000,"0.01901657"],[18404000,"0.06520321"],[18403000,"0.05433896"],[18402000,"0.45245734"],[18400000,"3.02810396"],[18361000,"0.11488546"],[18338000,"0.02726578"],[18333000,"0.18639502"],[18326000,"0.02907895"],[18324000,"0.00545732"],[18323000,"0.02724924"],[18321000,"0.05458217"],[18310000,"0.05461496"],[18307000,"0.02463112"],[18305000,"12.71608877"],[18300000,"2.00142420"],[18299000,"0.01397830"],[18295000,"0.01309702"],[18280000,"2.83853183"],[18268000,"0.06654209"],[18251000,"0.44994279"],[18250000,"0.69844553"],[18248000,"0.44089894"],[18230000,"0.02558321"],[18216000,"0.00586012"],[18212000,"0.06973424"],[18202000,"0.01648170"],[18200000,"3.31165521"],[18199000,"0.16248788"],[18188000,"1.12187645"],[18170000,"0.27517886"],[18160000,"66.07929515"],[18159000,"0.08130089"],[18152000,"0.39441455"],[18151000,"0.13264007"],[18150000,"0.56239862"],[18147000,"0.01102110"],[18144000,"0.09638673"],[18108000,"0.02801452"],[18101000,"0.55245566"],[18100000,"1.18629403"],[18095000,"0.05526388"],[18088000,"1.38213180"],[18075000,"0.03205311"],[18062000,"0.05536485"],[18048000,"0.49531643"],[18018000,"0.01000000"],[18012000,"0.05551854"],[18010000,"0.11104941"],[18008000,"0.01237327"],[18005000,"0.03080088"],[18003000,"0.00555462"],[18001000,"0.19443364"],[18000000,"26.15183333"],[17999000,"0.27854141"],[17960000,"0.10440089"],[17959000,"0.05568238"],[17953000,"0.71025700"],[17950000,"0.59067582"],[17913000,"0.11165075"],[17909000,"0.03147311"],[17900000,"0.06424581"],[17850000,"0.07226890"],[17828000,"0.05784440"],[17820000,"0.01122334"],[17813000,"0.11227755"],[17807000,"0.16847307"],[17800000,"0.10005129"],[17799000,"0.05618293"],[17798000,"0.56186088"],[17774000,"0.20000000"],[17754000,"0.05632533"],[17750000,"1.70834461"],[17740000,"0.02113866"],[17719000,"0.11887995"],[17717000,"0.16932889"],[17713000,"0.11291142"],[17705000,"0.05648121"],[17700000,"0.17302259"],[17699000,"0.05102661"],[17657000,"0.02831738"],[17654000,"0.02135719"],[17650000,"3.27803824"],[17613000,"0.11355248"],[17600000,"0.28409090"],[17599000,"0.00284107"],[17570000,"0.03414968"],[17555000,"0.01139276"],[17553000,"0.01709109"],[17532000,"0.02283350"],[17513000,"0.11420087"],[17510000,"0.02999046"],[17505000,"0.05902890"],[17501000,"10.33972670"],[17500000,"3.87785737"],[17450000,"0.05503650"],[17424000,"0.01147842"],[17413000,"0.11485671"],[17400000,"0.01484419"],[17350000,"0.01440922"],[17313000,"0.11552012"],[17300000,"0.05780346"],[17299000,"0.13006532"],[17253000,"0.01738828"],[17250000,"0.00891426"],[17
213000,"0.11619125"],[17200000,"0.12790697"],[17166000,"0.23301875"],[17121000,"0.11681560"]],"sell":[[18898000,"0.01157459"],[18899000,"4.50098492"],[18900000,"0.31818765"],[18906000,"0.66113731"],[18907000,"0.13154571"],[18979000,"15.70000000"],[19000000,"1.25445008"],[19100000,"0.05049293"],[19155000,"9.03914128"],[19173000,"157.20000000"],[19205000,"0.36279273"],[19210000,"0.03051297"],[19270000,"0.53591008"],[19290000,"0.05129825"],[19294000,"0.02879900"],[19296000,"0.04706534"],[19297000,"0.48479566"],[19298000,"0.10404844"],[19300000,"2.16518405"],[19336000,"0.41769567"],[19398000,"0.05131953"],[19400000,"0.54344875"],[19416000,"0.39259558"],[19431000,"0.02132037"],[19456000,"0.07976072"],[19495000,"64.50000000"],[19500000,"2.68438805"],[19502000,"0.32902415"],[19504000,"0.08237701"],[19516000,"0.10057434"],[19523000,"0.01265415"],[19540000,"0.18574165"],[19580000,"0.05003433"],[19600000,"2.32908989"],[19640000,"0.01808019"],[19646000,"0.10000000"],[19667000,"0.05010768"],[19700000,"0.05213042"],[19715000,"0.28119708"],[19743000,"0.13787009"],[19790000,"0.05263157"],[19796000,"0.10048494"],[19800000,"0.11746224"],[19848000,"0.51695616"],[19849000,"0.01490823"],[19850000,"1.83613216"],[19851000,"0.89799799"],[19872000,"0.11000000"],[19876000,"0.01173500"],[19879000,"0.33553227"],[19880000,"2.59918090"],[19890000,"0.47602461"],[19897000,"2.55000000"],[19899000,"0.18427635"],[19900000,"2.18225182"],[19901000,"0.01303044"],[19902000,"0.26253751"],[19903000,"0.03184070"],[19905000,"0.06719900"],[19924000,"0.20450392"],[19945000,"0.38671872"],[19947000,"0.09097550"],[19950000,"0.02141400"],[19951000,"0.05405405"],[19958000,"0.06900412"],[19977000,"0.01317008"],[19992000,"0.78766653"],[19997000,"0.02720477"],[19998000,"0.04376623"],[19999000,"2.22509224"],[20000000,"12.01979485"],[20001000,"0.01442670"],[20010000,"0.02736751"],[20011000,"0.57589323"],[20019000,"0.05225679"],[20037000,"0.01097935"],[20049000,"2.56210000"],[20050000,"0.94853866"],[20058000,"0.01180235"],[20089000,"0.82074578"],[20094000,"0.01000696"],[20099000,"0.20000000"],[20100000,"5.09448347"],[20105000,"0.05007635"],[20108000,"0.04813559"],[20111000,"0.14382494"],[20125000,"0.04755543"],[20130000,"0.01296173"],[20150000,"0.14431676"],[20156000,"0.25005255"],[20168000,"0.09548481"],[20180000,"2.99000000"],[20185000,"0.23745086"],[20196000,"0.01249063"],[20199000,"0.20000000"],[20200000,"0.75835528"],[20201000,"6.16664220"],[20203000,"0.02283532"],[20210000,"0.65194300"],[20250000,"1.73127444"],[20254000,"0.03101615"],[20261000,"0.05395605"],[20270000,"0.01140432"],[20286000,"0.02639481"],[20289000,"0.26762727"],[20299000,"0.22655479"],[20300000,"3.23526146"],[20301000,"0.09217087"],[20337000,"0.58917714"],[20341000,"1.28106274"],[20345000,"0.05003210"],[20350000,"1.01002023"],[20360000,"0.09977549"],[20371000,"0.04930517"],[20388000,"0.50701193"],[20400000,"1.93489369"],[20406000,"0.07022476"],[20412000,"0.05622558"],[20414000,"0.25018354"],[20418000,"0.02198056"],[20438000,"1.57353438"],[20441000,"1.00000000"],[20450000,"0.46705391"],[20453000,"0.05003733"],[20473000,"0.01074575"],[20492000,"0.10000000"],[20495000,"0.02278079"],[20499000,"3.04795342"],[20500000,"32.32824668"],[20502000,"0.19839519"],[20508000,"0.44083337"],[20511000,"0.15000017"],[20512000,"0.05025015"],[20513000,"0.02498376"],[20515000,"0.01668513"],[20532000,"0.19201700"],[20540000,"0.22727960"],[20541000,"1.00000000"],[20550000,"0.44934491"],[20565000,"2.65300000"],[20568000,"5.00000000"],[20588000,"0.02578898"],[20599000,"0.30000000"],[20600000,"10.4
3480286"],[20606000,"0.06630111"],[20615000,"0.02321350"],[20641000,"0.97898908"],[20648000,"0.01358460"],[20650000,"1.78839367"],[20666000,"0.11387052"]]}'
assert market.get_best_price(ETHIDR) == 18875500
def test_get_buy_order_book_invalid_currency(market, req_mock):
currency = 'USDIDR'
with pytest.raises(RuntimeError) as excinfo:
market.get_buy_order_book(currency)
assert 'Invalid currency pair: ' + currency in str(excinfo.value)
def test_get_sell_order_book_invalid_currency(market, req_mock):
currency = 'USDIDR'
with pytest.raises(RuntimeError) as excinfo:
market.get_sell_order_book(currency)
assert 'Invalid currency pair: ' + currency in str(excinfo.value)
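For reference, a hypothetical parse_ohlc_data consistent with the assertions in test_parse_ohlc_data above (the real implementation lives in src.market and is not shown in this file): the TradingView-style arrays become DataFrame columns indexed by the 't' timestamps.

import json
from pandas import DataFrame

def parse_ohlc_data(json_str):
    # map the TradingView-style response keys onto named OHLCV columns,
    # indexed by the epoch timestamps in "t"
    data = json.loads(json_str)
    return DataFrame({
        'open': data['o'],
        'high': data['h'],
        'low': data['l'],
        'close': data['c'],
        'volume': data['v'],
    }, index=data['t'])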
| 285.836957
| 7,270
| 0.71415
| 3,290
| 26,297
| 5.677204
| 0.228267
| 0.005996
| 0.004283
| 0.004069
| 0.951226
| 0.946622
| 0.942446
| 0.942446
| 0.9357
| 0.9357
| 0
| 0.643024
| 0.022626
| 26,297
| 91
| 7,271
| 288.978022
| 0.08369
| 0.000646
| 0
| 0.269231
| 0
| 0.128205
| 0.840697
| 0.828481
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.141026
| false
| 0
| 0.089744
| 0.012821
| 0.25641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 95f58a9c080ceaa17e0fb7bf00533792c3669cee
| 14,609
| py
| Python
| tests/unit/hardware/TestMica.py
| rakhimov/rtk
| adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63
| ["BSD-3-Clause"] | null | null | null
| tests/unit/hardware/TestMica.py
| rakhimov/rtk
| adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63
| ["BSD-3-Clause"] | null | null | null
| tests/unit/hardware/TestMica.py
| rakhimov/rtk
| adc35e218ccfdcf3a6e3082f6a1a1d308ed4ff63
| ["BSD-3-Clause"] | 2
| 2020-04-03T04:14:42.000Z
| 2021-02-22T05:30:35.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is the test class for testing Mica capacitor module algorithms and models.
"""
#
# tests.unit.TestMica.py is part of The RTK Project
#
# All rights reserved.
import sys
from os.path import dirname
sys.path.insert(0, dirname(dirname(dirname(__file__))) + "/rtk")
import unittest
from nose.plugins.attrib import attr
from hardware.component.capacitor.fixed.Mica import Button, Mica
__author__ = 'Andrew Rowland'
__email__ = 'andrew.rowland@reliaqual.com'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2015 Andrew "Weibullguy" Rowland'
class TestMicaButtonModel(unittest.TestCase):
"""
Class for testing the Mica Button capacitor data model class.
"""
def setUp(self):
"""
Setup the test fixture for the Capacitor class.
"""
self.DUT = Button()
@attr(all=True, unit=True)
def test_create(self):
"""
(TestMicaButton) __init__ should return a Mica Button capacitor model
"""
self.assertTrue(isinstance(self.DUT, Button))
# Verify Hardware class was properly initialized.
self.assertEqual(self.DUT.revision_id, None)
self.assertEqual(self.DUT.category_id, 0)
# Verify Capacitor class was properly initialized.
self.assertEqual(self.DUT.quality, 0)
# Verify the Mica Button capacitor class was properly initialized.
self.assertEqual(self.DUT._piE, [1.0, 2.0, 10.0, 5.0, 16.0, 5.0, 7.0,
22.0, 28.0, 23.0, 0.5, 13.0, 34.0,
610.0])
self.assertEqual(self.DUT._piQ, [5.0, 15.0])
self.assertEqual(self.DUT._lambdab_count, [0.018, 0.037, 0.19, 0.094,
0.31, 0.10, 0.14, 0.47,
0.60, 0.48, 0.0091, 0.25,
0.68, 11.0])
self.assertEqual(self.DUT.subcategory, 47)
self.assertEqual(self.DUT.specification, 0)
self.assertEqual(self.DUT.spec_sheet, 0)
self.assertEqual(self.DUT.reference_temperature, 358.0)
@attr(all=True, unit=True)
def test_calculate_217_count(self):
"""
(TestMicaButton) calculate_part should return False on success when calculating MIL-HDBK-217F parts count results
"""
self.DUT.quality = 1
self.DUT.environment_active = 5
self.DUT.specification = 2
self.DUT.hazard_rate_type = 1
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'], 0.31)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 5.0)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 1.55E-6)
@attr(all=True, unit=True)
def test_calculate_217_stress_low_temp(self):
"""
(TestMicaButton) calculate_part should return False on success when calculating MIL-HDBK-217F stress results for the 85C specification
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.reference_temperature = 358.0
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ * piE * piCV')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'],
0.014951137)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 5.0)
self.assertEqual(self.DUT.hazard_rate_model['piE'], 2.0)
self.assertAlmostEqual(self.DUT.hazard_rate_model['piCV'], 0.389560899)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 5.82437856E-8)
@attr(all=True, unit=True)
def test_calculate_217_stress_high_temp(self):
"""
(TestMicaButton) calculate_part should return False on success when calculating MIL-HDBK-217F stress results for the 125C specification
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.reference_temperature = 423.0
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ * piE * piCV')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'],
0.011380255)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 5.0)
self.assertEqual(self.DUT.hazard_rate_model['piE'], 2.0)
self.assertAlmostEqual(self.DUT.hazard_rate_model['piCV'], 0.389560899)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 4.43330228E-8)
@attr(all=True, unit=True)
def test_calculate_217_stress_overflow(self):
"""
(TestMicaButton) calculate_part should return True when an OverflowError is raised when calculating MIL-HDBK-217F stress results
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.DUT.reference_temperature = 0.00000001
self.assertTrue(self.DUT.calculate_part())
@attr(all=True, unit=True)
def test_calculate_217_stress_zero_division(self):
"""
(TestMicaButton) calculate_part should return True when a ZeroDivisionError is raised when calculating MIL-HDBK-217F stress results
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.DUT.reference_temperature = 0.0
self.assertTrue(self.DUT.calculate_part())
class TestMicaMicaModel(unittest.TestCase):
"""
Class for testing the Mica capacitor data model class.
"""
def setUp(self):
"""
Setup the test fixture for the Mica Capacitor class.
"""
self.DUT = Mica()
@attr(all=True, unit=True)
def test_create(self):
"""
(TestMicaMica) __init__ should return a Mica capacitor model
"""
self.assertTrue(isinstance(self.DUT, Mica))
# Verify Hardware class was properly initialized.
self.assertEqual(self.DUT.revision_id, None)
self.assertEqual(self.DUT.category_id, 0)
# Verify Capacitor class was properly initialized.
self.assertEqual(self.DUT.quality, 0)
# Verify the Mica capacitor class was properly initialized.
self.assertEqual(self.DUT._piE, [1.0, 2.0, 10.0, 6.0, 16.0, 5.0, 7.0,
22.0, 28.0, 23.0, 0.5, 13.0, 34.0,
610.0])
self.assertEqual(self.DUT._piQ, [0.01, 0.03, 0.1, 0.3, 1.0, 1.5, 3.0,
6.0, 15.0])
self.assertEqual(self.DUT._lambdab_count, [0.0005, 0.0015, 0.0091,
0.0044, 0.014, 0.0068,
0.0095, 0.054, 0.069, 0.031,
0.00025, 0.012, 0.046,
0.45])
self.assertEqual(self.DUT.subcategory, 46)
self.assertEqual(self.DUT.specification, 0)
self.assertEqual(self.DUT.spec_sheet, 0)
self.assertEqual(self.DUT.reference_temperature, 343.0)
@attr(all=True, unit=True)
def test_calculate_217_count(self):
"""
(TestMicaMica) calculate_part should return False on success when calculating MIL-HDBK-217F parts count results
"""
self.DUT.quality = 1
self.DUT.environment_active = 5
self.DUT.specification = 2
self.DUT.hazard_rate_type = 1
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'], 0.014)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 0.01)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 1.4E-10)
@attr(all=True, unit=True)
def test_calculate_217_stress_low_temp(self):
"""
(TestMicaMica) calculate_part should return False on success when calculating MIL-HDBK-217F stress results for the 70C specification
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.reference_temperature = 343.0
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ * piE * piCV')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'],
0.002193033)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 0.01)
self.assertEqual(self.DUT.hazard_rate_model['piE'], 2.0)
self.assertAlmostEqual(self.DUT.hazard_rate_model['piCV'], 0.517134415)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 6.80455807E-10)
@attr(all=True, unit=True)
def test_calculate_217_stress_mid1_temp(self):
"""
(TestMicaMica) calculate_part should return False on success when calculating MIL-HDBK-217F stress results for the 85C specification
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.reference_temperature = 358.0
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ * piE * piCV')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'],
0.001212973)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 0.01)
self.assertEqual(self.DUT.hazard_rate_model['piE'], 2.0)
self.assertAlmostEqual(self.DUT.hazard_rate_model['piCV'], 0.517134415)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 3.76361032E-10)
@attr(all=True, unit=True)
def test_calculate_217_stress_mid2_temp(self):
"""
(TestMicaMica) calculate_part should return False on success when calculating MIL-HDBK-217F stress results for the 125C specification
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.reference_temperature = 398.0
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ * piE * piCV')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'],
0.000311013)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 0.01)
self.assertEqual(self.DUT.hazard_rate_model['piE'], 2.0)
self.assertAlmostEqual(self.DUT.hazard_rate_model['piCV'], 0.517134415)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 9.65013129E-11)
@attr(all=True, unit=True)
def test_calculate_217_stress_high_temp(self):
"""
(TestMicaMica) calculate_part should return False on success when calculating MIL-HDBK-217F stress results for the 150C specification
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.reference_temperature = 423.0
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.assertFalse(self.DUT.calculate_part())
self.assertEqual(self.DUT.hazard_rate_model['equation'],
'lambdab * piQ * piE * piCV')
self.assertAlmostEqual(self.DUT.hazard_rate_model['lambdab'],
0.0001514)
self.assertEqual(self.DUT.hazard_rate_model['piQ'], 0.01)
self.assertEqual(self.DUT.hazard_rate_model['piE'], 2.0)
self.assertAlmostEqual(self.DUT.hazard_rate_model['piCV'], 0.517134415)
self.assertAlmostEqual(self.DUT.hazard_rate_active, 4.69763836E-11)
@attr(all=True, unit=True)
def test_calculate_217_stress_overflow(self):
"""
(TestMicaMica) calculate_part should return True when an OverflowError is raised when calculating MIL-HDBK-217F stress results
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.DUT.reference_temperature = 0.00000001
self.assertTrue(self.DUT.calculate_part())
@attr(all=True, unit=True)
def test_calculate_217_stress_zero_division(self):
"""
(TestMicaMica) calculate_part should return True when a ZeroDivisionError is raised when calculating MIL-HDBK-217F stress results
"""
self.DUT.environment_active = 2
self.DUT.hazard_rate_type = 2
self.DUT.quality = 1
self.DUT.operating_voltage = 1.25
self.DUT.acvapplied = 0.0025
self.DUT.rated_voltage = 3.3
self.DUT.capacitance = 2.7E-6
self.DUT.reference_temperature = 0.0
self.assertTrue(self.DUT.calculate_part())
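The stress tests above all pin the model to 'lambdab * piQ * piE * piCV' and expect calculate_part to return True when the math overflows or divides by zero. A sketch of that contract follows; the lambdab expression below is only a placeholder with the generic MIL-HDBK-217F shape (a voltage-stress term times an exponential temperature term), and the real mica constants live in the RTK source.

import math

def stress_hazard_rate(piQ, piE, piCV, voltage_ratio, temperature,
                       reference_temperature):
    try:
        # placeholder base hazard rate, generic MIL-HDBK-217F capacitor form
        lambdab = 0.00086 * ((voltage_ratio / 0.4) ** 5 + 1.0) * \
            math.exp(2.5 * ((temperature + 273.0) / reference_temperature) ** 18)
        # hazard rates are in failures per 10^6 hours, hence the 1E-6
        # scaling seen in the hazard_rate_active assertions above
        return False, lambdab * piQ * piE * piCV * 1E-6
    except (OverflowError, ZeroDivisionError):
        # a reference temperature near zero overflows the exponent; exactly
        # zero divides by zero; calculate_part reports both as True
        return True, None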
| 39.915301
| 143
| 0.625094
| 1,859
| 14,609
| 4.762776
| 0.115116
| 0.132821
| 0.082223
| 0.107522
| 0.900949
| 0.888638
| 0.887621
| 0.868873
| 0.853513
| 0.845381
| 0
| 0.071869
| 0.267575
| 14,609
| 365
| 144
| 40.024658
| 0.755607
| 0.165446
| 0
| 0.771186
| 0
| 0
| 0.040849
| 0.002378
| 0
| 0
| 0
| 0
| 0.330508
| 1
| 0.067797
| false
| 0
| 0.021186
| 0
| 0.097458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 252dea614c6d045d759de1d1345ad360975b76bd
| 8,763
| py
| Python
| cloudscale/tests/test_objects_user.py
| resmo/python-cloudscale
| e194e3f74c4df549e59781861d4a0a1e1abf62fc
| ["MIT"] | 6
| 2019-11-21T15:08:58.000Z
| 2019-12-18T07:46:01.000Z
| cloudscale/tests/test_objects_user.py
| resmo/python-cloudscale
| e194e3f74c4df549e59781861d4a0a1e1abf62fc
| ["MIT"] | 15
| 2019-11-26T19:48:12.000Z
| 2020-05-01T14:52:07.000Z
| cloudscale/tests/test_objects_user.py
| resmo/python-cloudscale
| e194e3f74c4df549e59781861d4a0a1e1abf62fc
| ["MIT"] | null | null | null
from cloudscale import Cloudscale, CloudscaleApiException, CloudscaleException, CLOUDSCALE_API_ENDPOINT
from cloudscale.cli import cli
import responses
import pytest
from click.testing import CliRunner
OBJECTS_USER_RESP = {
"href": "https://api.cloudscale.ch/v1/objects-users/6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15",
"id": "6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15",
"display_name": "alan",
"keys": [
{
"access_key": "0ZTAIBKSGYBRHQ09G11W",
"secret_key": "bn2ufcwbIa0ARLc5CLRSlVaCfFxPHOpHmjKiH34T"
}
],
"tags": {
"project": "apollo"
}
}
@responses.activate
def test_objects_user_get_all():
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json=[OBJECTS_USER_RESP],
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users?tag:project=apollo',
json=[OBJECTS_USER_RESP],
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json={},
status=500)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json=[OBJECTS_USER_RESP],
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users?tag:project=apollo',
json=[OBJECTS_USER_RESP],
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json={},
status=500)
cloudscale = Cloudscale(api_token="token")
objects_users = cloudscale.objects_user.get_all()
assert objects_users[0]['display_name'] == "alan"
assert objects_users[0]['id'] == "6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15"
cloudscale = Cloudscale(api_token="token")
objects_users = cloudscale.objects_user.get_all(filter_tag="project=apollo")
assert objects_users[0]['display_name'] == "alan"
assert objects_users[0]['id'] == "6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15"
with pytest.raises(CloudscaleApiException) as excinfo:
cloudscale = Cloudscale(api_token="token")
cloudscale.objects_user.get_all()
assert excinfo.value.status_code == 500
assert str(excinfo.value).startswith("API Response Error (500):")
runner = CliRunner()
result = runner.invoke(cli, [
'objects-user',
'-a',
'token',
'list',
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'objects-user',
'-a',
'token',
'list',
'--filter-tag',
'project=apollo',
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'objects-user',
'-a',
'token',
'list',
])
assert result.exit_code > 0
@responses.activate
def test_objects_user_get_by_uuid():
uuid = "6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15"
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users/' + uuid,
json=OBJECTS_USER_RESP,
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users/unknown',
json={},
status=404)
cloudscale = Cloudscale(api_token="token")
objects_user = cloudscale.objects_user.get_by_uuid(uuid=uuid)
assert objects_user['display_name'] == "alan"
assert objects_user['id'] == uuid
with pytest.raises(CloudscaleApiException) as excinfo:
cloudscale = Cloudscale(api_token="token")
cloudscale.objects_user.get_by_uuid(uuid="unknown")
assert excinfo.value.status_code == 404
runner = CliRunner()
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'show',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'show',
'unknown',
])
assert result.exit_code > 0
@responses.activate
def test_objects_user_delete():
uuid = "6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15"
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users/' + uuid,
json=OBJECTS_USER_RESP,
status=200)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users/unknown',
json=OBJECTS_USER_RESP,
status=200)
responses.add(
responses.DELETE,
CLOUDSCALE_API_ENDPOINT + '/objects-users/' + uuid,
status=204)
responses.add(
responses.DELETE,
CLOUDSCALE_API_ENDPOINT + '/objects-users/unknown',
json={},
status=404)
cloudscale = Cloudscale(api_token="token")
objects_user = cloudscale.objects_user.delete(uuid=uuid)
assert objects_user is None
with pytest.raises(CloudscaleApiException) as excinfo:
cloudscale = Cloudscale(api_token="token")
cloudscale.objects_user.delete(uuid="unknown")
assert excinfo.value.status_code == 404
runner = CliRunner()
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'delete',
uuid,
])
assert result.exit_code == 1
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'delete',
'--force',
uuid,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'delete',
'--force',
'unknown',
])
assert result.exit_code > 0
@responses.activate
def test_objects_user_create():
display_name = "alan"
responses.add(
responses.POST,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json=OBJECTS_USER_RESP,
status=201)
responses.add(
responses.POST,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json={},
status=500)
responses.add(
responses.POST,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json=OBJECTS_USER_RESP,
status=201)
responses.add(
responses.POST,
CLOUDSCALE_API_ENDPOINT + '/objects-users',
json={},
status=500)
cloudscale = Cloudscale(api_token="token")
cloudscale.objects_user.create(
display_name=display_name,
)
with pytest.raises(CloudscaleApiException) as excinfo:
cloudscale = Cloudscale(api_token="token")
cloudscale.objects_user.create(
display_name=display_name,
)
assert excinfo.value.status_code == 500
runner = CliRunner()
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'create',
'--display-name',
display_name,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'create',
'--display-name',
display_name,
])
assert result.exit_code > 0
@responses.activate
def test_objects_user_update():
uuid = "6fe39134bf4178747eebc429f82cfafdd08891d4279d0d899bc4012db1db6a15"
display_name = "alan"
responses.add(
responses.PATCH,
CLOUDSCALE_API_ENDPOINT + '/objects-users/' + uuid,
json=OBJECTS_USER_RESP,
status=204)
responses.add(
responses.GET,
CLOUDSCALE_API_ENDPOINT + '/objects-users/' + uuid,
json=OBJECTS_USER_RESP,
status=200)
responses.add(
responses.PATCH,
CLOUDSCALE_API_ENDPOINT + '/objects-users/unknown',
json={},
status=404)
cloudscale = Cloudscale(api_token="token")
objects_user = cloudscale.objects_user.update(
uuid=uuid,
display_name=display_name
)
assert objects_user['display_name'] == display_name
assert objects_user['id'] == uuid
with pytest.raises(CloudscaleApiException) as excinfo:
cloudscale = Cloudscale(api_token="token")
cloudscale.objects_user.update(
uuid="unknown",
display_name=display_name
)
assert excinfo.value.status_code == 404
runner = CliRunner()
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'update',
uuid,
'--display-name',
display_name,
])
assert result.exit_code == 0
result = runner.invoke(cli, [
'objects-user',
'-a', 'token',
'update',
'unknown',
'--display-name',
display_name,
])
assert result.exit_code > 0
def test_objects_user_missing_api_key():
runner = CliRunner()
result = runner.invoke(cli, [
'objects-user',
'list',
])
assert result.exit_code == 1
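The duplicate responses.add registrations above are deliberate: the responses library consumes registrations for the same method and URL in order, so each SDK call and each CLI invocation gets the next queued status. A standalone sketch of that behaviour (the example URL is made up):

import requests
import responses

@responses.activate
def demo_ordered_replay():
    url = 'https://api.example.com/thing'
    responses.add(responses.GET, url, json={'ok': True}, status=200)
    responses.add(responses.GET, url, json={}, status=500)
    assert requests.get(url).status_code == 200  # first registration
    assert requests.get(url).status_code == 500  # second registration

demo_ordered_replay()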
| 27.643533
| 122
| 0.612119
| 865
| 8,763
| 6.002312
| 0.093642
| 0.108051
| 0.080894
| 0.102465
| 0.879045
| 0.840139
| 0.830894
| 0.807589
| 0.798536
| 0.736518
| 0
| 0.058778
| 0.269999
| 8,763
| 316
| 123
| 27.731013
| 0.752853
| 0
| 0
| 0.848797
| 0
| 0
| 0.179048
| 0.065959
| 0
| 0
| 0
| 0
| 0.09622
| 1
| 0.020619
| false
| 0
| 0.017182
| 0
| 0.037801
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 25442aa243a1e0d742c9fd52ba06e86b9ee153a9
| 44
| py
| Python
| app/controller/__init__.py
| gmbz/frro-soporte-TPI-09
| 5161f5002fa11307c357920ec2278bb97f92f926
| ["MIT"] | 2
| 2021-09-19T13:30:20.000Z
| 2021-12-29T21:49:09.000Z
| app/controller/__init__.py
| gmbz/frro-soporte-TPI-09
| 5161f5002fa11307c357920ec2278bb97f92f926
| ["MIT"] | null | null | null
| app/controller/__init__.py
| gmbz/frro-soporte-TPI-09
| 5161f5002fa11307c357920ec2278bb97f92f926
| ["MIT"] | 1
| 2021-06-24T14:24:02.000Z
| 2021-06-24T14:24:02.000Z
api_key = "25398bd0f8e1460f3769b59bfbf5eea6"
| 44
| 44
| 0.886364
| 3
| 44
| 12.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.452381
| 0.045455
| 44
| 1
| 44
| 44
| 0.452381
| 0
| 0
| 0
| 0
| 0
| 0.711111
| 0.711111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| c2b5fc1765da17531ff3d849383d3d5a76db6377
| 33,173
| py
| Python
| PyEFVLib/Shape.py
| Gustavo029/GridReader
| 7edc950c469b06c3de0093e5fd8bf6cfd59af354
| ["MIT"] | 1
| 2022-01-26T17:14:54.000Z
| 2022-01-26T17:14:54.000Z
| PyEFVLib/Shape.py
| Gustavo029/GridReader
| 7edc950c469b06c3de0093e5fd8bf6cfd59af354
| ["MIT"] | null | null | null
| PyEFVLib/Shape.py
| Gustavo029/GridReader
| 7edc950c469b06c3de0093e5fd8bf6cfd59af354
| ["MIT"] | 3
| 2020-10-26T07:11:19.000Z
| 2022-01-26T17:14:42.000Z
import numpy as np
from PyEFVLib.Point import Point
def areCoplanar(p1,p2,p3,p4):
# the scalar triple product of the three edge vectors vanishes iff the four
# points are coplanar; take its magnitude before comparing to the tolerance
return bool( abs(np.dot( (p2-p1), np.cross((p3-p1), (p4-p1)) )) < 1e-10 )
class Triangle:
dimension = 2
numberOfInnerFaces = 3
numberOfFacets = 3
subelementTransformedVolumes = np.array([1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0])
innerFaceShapeFunctionValues = np.array([[5.0/12.0, 5.0/12.0, 1.0/6.0],[1.0/6.0, 5.0/12.0, 5.0/12.0], [5.0/12.0, 1.0/6.0, 5.0/12.0]])
innerFaceShapeFunctionDerivatives = np.array([[[-1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0]]])
innerFaceNeighborVertices = np.array([[0, 1], [1, 2], [2, 0]])
subelementShapeFunctionValues = np.array([[7.0/12.0, 5.0/24.0, 5.0/24.0], [5.0/24.0, 7.0/12.0, 5.0/24.0], [5.0/24.0, 5.0/24.0, 7.0/12.0]])
subelementShapeFunctionDerivatives = np.array([[[-1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0]]])
facetVerticesIndexes = np.array([[1, 0], [2, 1], [0, 2]])
outerFaceShapeFunctionValues = np.array([[[1.0/4.0, 3.0/4.0, 0.0/1.0], [3.0/4.0, 1.0/4.0, 0.0/1.0]], [[0.0/1.0, 1.0/4.0, 3.0/4.0], [0.0/1.0, 3.0/4.0, 1.0/4.0]], [[3.0/4.0, 0.0/1.0, 1.0/4.0], [1.0/4.0, 0.0/1.0, 3.0/4.0]]])
vertexShapeFunctionDerivatives = np.array([[[-1.0,-1.0], [1.0,0.0], [0.0,1.0]], [[-1.0,-1.0], [1.0,0.0], [0.0,1.0]], [[-1.0,-1.0], [1.0,0.0], [0.0,1.0]]])
@staticmethod
def _is(elem):
return len(elem.vertices) == 3
@staticmethod
def getInnerFaceAreaVector(local, elementCentroid, elementVertices):
vertex1 = elementVertices[Triangle.innerFaceNeighborVertices[local][0]]
vertex2 = elementVertices[Triangle.innerFaceNeighborVertices[local][1]]
areaVectorCoords = ( elementCentroid - (vertex1 + vertex2)/2.0 ).getCoordinates()
return Point(areaVectorCoords[1], -areaVectorCoords[0], 0.0)
class Quadrilateral:
dimension = 2
numberOfInnerFaces = 4
numberOfFacets = 4
subelementTransformedVolumes = np.array([1.0/4.0, 1.0/4.0, 1.0/4.0, 1.0/4.0])
innerFaceShapeFunctionValues = np.array([[3.0/8.0, 3.0/8.0, 1.0/8.0, 1.0/8.0], [1.0/8.0, 3.0/8.0, 3.0/8.0, 1.0/8.0], [1.0/8.0, 1.0/8.0, 3.0/8.0, 3.0/8.0], [3.0/8.0, 1.0/8.0, 1.0/8.0, 3.0/8.0]])
innerFaceShapeFunctionDerivatives = np.array([[[-3.0/4.0, -1.0/2.0], [3.0/4.0, -1.0/2.0], [1.0/4.0, 1.0/2.0], [-1.0/4.0, 1.0/2.0]], [[-1.0/2.0, -1.0/4.0], [1.0/2.0, -3.0/4.0], [1.0/2.0, 3.0/4.0], [-1.0/2.0, 1.0/4.0]], [[-1.0/4.0, -1.0/2.0], [1.0/4.0, -1.0/2.0], [3.0/4.0, 1.0/2.0], [-3.0/4.0, 1.0/2.0]], [[-1.0/2.0, -3.0/4.0], [1.0/2.0, -1.0/4.0], [1.0/2.0, 1.0/4.0], [-1.0/2.0, 3.0/4.0]]])
innerFaceNeighborVertices = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
subelementShapeFunctionValues = np.array([[9.0/16.0, 3.0/16.0, 1.0/16.0, 3.0/16.0], [3.0/16.0, 9.0/16.0, 3.0/16.0, 1.0/16.0], [1.0/16.0, 3.0/16.0, 9.0/16.0, 3.0/16.0], [3.0/16.0, 1.0/16.0, 3.0/16.0, 9.0/16.0]])
subelementShapeFunctionDerivatives = np.array([[[-3.0/4.0, -3.0/4.0], [3.0/4.0, -1.0/4.0], [1.0/4.0, 1.0/4.0], [-1.0/4.0, 3.0/4.0]], [[-3.0/4.0, -1.0/4.0], [3.0/4.0, -3.0/4.0], [1.0/4.0, 3.0/4.0], [-1.0/4.0, 1.0/4.0]], [[-1.0/4.0, -1.0/4.0], [1.0/4.0, -3.0/4.0], [3.0/4.0, 3.0/4.0], [-3.0/4.0, 1.0/4.0]], [[-1.0/4.0, -3.0/4.0], [1.0/4.0, -1.0/4.0], [3.0/4.0, 1.0/4.0], [-3.0/4.0, 3.0/4.0]]])
facetVerticesIndexes = np.array([[1, 0], [2, 1], [3, 2], [0, 3]])
outerFaceShapeFunctionValues = np.array([[[1.0/4.0, 3.0/4.0, 0.0/1.0, 0.0/1.0], [3.0/4.0, 1.0/4.0, 0.0/1.0, 0.0/1.0]], [[0.0/1.0, 1.0/4.0, 3.0/4.0, 0.0/1.0], [0.0/1.0, 3.0/4.0, 1.0/4.0, 0.0/1.0]], [[0.0/1.0, 0.0/1.0, 1.0/4.0, 3.0/4.0], [0.0/1.0, 0.0/1.0, 3.0/4.0, 1.0/4.0]], [[3.0/4.0, 0.0/1.0, 0.0/1.0, 1.0/4.0], [1.0/4.0, 0.0/1.0, 0.0/1.0, 3.0/4.0]]])
vertexShapeFunctionDerivatives = np.array([[[-1.0,-1.0], [1.0,0.0], [0.0,0.0], [0.0,1.0]], [[-1.0,0.0], [1.0,0.0], [0.0,1.0], [0.0,0.0]], [[0.0,0.0], [0.0,-1.0], [1.0,1.0], [-1.0,0.0]], [[0.0,-1.0], [0.0,0.0], [1.0,0.0], [0.0,1.0]]])
@staticmethod
def _is(elem):
return len(elem.vertices) == 4 and areCoplanar(*[v.getCoordinates() for v in elem.vertices])
@staticmethod
def getInnerFaceAreaVector(local, elementCentroid, elementVertices):
vertex1 = elementVertices[Quadrilateral.innerFaceNeighborVertices[local][0]]
vertex2 = elementVertices[Quadrilateral.innerFaceNeighborVertices[local][1]]
areaVectorCoords = ( elementCentroid - (vertex1 + vertex2)/2.0 ).getCoordinates()
return Point(areaVectorCoords[1], -areaVectorCoords[0], 0.0)
class Tetrahedron:
dimension = 3
numberOfInnerFaces = 6
numberOfFacets = 4
subelementTransformedVolumes = np.array([1.0/24.0, 1.0/24.0, 1.0/24.0, 1.0/24.0])
innerFaceShapeFunctionValues = np.array([[17.0/48.0, 17.0/48.0, 7.0/48.0, 7.0/48.0], [7.0/48.0, 17.0/48.0, 17.0/48.0, 7.0/48.0], [17.0/48.0, 7.0/48.0, 17.0/48.0, 7.0/48.0], [17.0/48.0, 7.0/48.0, 7.0/48.0, 17.0/48.0], [7.0/48.0, 7.0/48.0, 17.0/48.0, 17.0/48.0], [7.0/48.0, 17.0/48.0, 7.0/48.0, 17.0/48.0]])
innerFaceShapeFunctionDerivatives = np.array([[[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]]])
innerFaceNeighborVertices = np.array([[0, 1, 3, 2], [1, 2, 3, 0], [2, 0, 3, 1], [0, 3, 2, 1], [1, 3, 0, 2], [2, 3, 1, 0]])
subelementShapeFunctionValues = np.array([[15.0/32.0, 17.0/96.0, 17.0/96.0, 17.0/96.0], [17.0/96.0, 15.0/32.0, 17.0/96.0, 17.0/96.0], [17.0/96.0, 17.0/96.0, 15.0/32.0, 17.0/96.0], [17.0/96.0, 17.0/96.0, 17.0/96.0, 15.0/32.0]])
subelementShapeFunctionDerivatives = np.array([[[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/1.0, -1.0/1.0, -1.0/1.0], [1.0/1.0, 0.0/1.0, 0.0/1.0], [0.0/1.0, 1.0/1.0, 0.0/1.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]]])
facetVerticesIndexes = np.array([[0, 2, 1], [0, 3, 2], [0, 1, 3], [1, 2, 3]])
outerFaceShapeFunctionValues = np.array([[[7.0/12.0, 5.0/24.0, 5.0/24.0, 0.0/1.0], [5.0/24.0, 5.0/24.0, 7.0/12.0, 0.0/1.0], [5.0/24.0, 7.0/12.0, 5.0/24.0, 0.0/1.0]], [[7.0/12.0, 0.0/1.0, 5.0/24.0, 5.0/24.0], [5.0/24.0, 0.0/1.0, 5.0/24.0, 7.0/12.0], [5.0/24.0, 0.0/1.0, 7.0/12.0, 5.0/24.0]], [[7.0/12.0, 5.0/24.0, 0.0/1.0, 5.0/24.0], [5.0/24.0, 7.0/12.0, 0.0/1.0, 5.0/24.0], [5.0/24.0, 5.0/24.0, 0.0/1.0, 7.0/12.0]], [[0.0/1.0, 7.0/12.0, 5.0/24.0, 5.0/24.0], [0.0/1.0, 5.0/24.0, 7.0/12.0, 5.0/24.0], [0.0/1.0, 5.0/24.0, 5.0/24.0, 7.0/12.0]]])
@staticmethod
def _is(elem):
return len(elem.vertices) == 4 and not areCoplanar(*[v.getCoordinates() for v in elem.vertices])
@staticmethod
def getInnerFaceAreaVector(local, elementCentroid, elementVertices):
b = Tetrahedron.innerFaceNeighborVertices[local][0]
f = Tetrahedron.innerFaceNeighborVertices[local][1]
q = Tetrahedron.innerFaceNeighborVertices[local][2]
w = Tetrahedron.innerFaceNeighborVertices[local][3]
p0 = ( elementCentroid ).getCoordinates()
p1 = ( (elementVertices[b] + elementVertices[f] + elementVertices[q]) / 3.0 ).getCoordinates()
p2 = ( (elementVertices[b] + elementVertices[f]) / 2.0 ).getCoordinates()
p3 = ( (elementVertices[b] + elementVertices[f] + elementVertices[w]) / 3.0 ).getCoordinates()
p = ( np.cross((p1-p0),(p3-p0)) + np.cross((p3-p2),(p1-p2)) ) / 2.0
return Point(*p)
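The shape-function tables above are interpolation weights, so every row must sum to one (partition of unity) and every derivative table must sum to zero across the vertex axis (a constant field is reproduced exactly). A small sanity-check sketch, not part of PyEFVLib, covering the shapes defined so far:

def _check_partition_of_unity(shape):
    # weights reproduce constants exactly: rows sum to 1
    for table in (shape.innerFaceShapeFunctionValues,
                  shape.subelementShapeFunctionValues):
        assert np.allclose(table.sum(axis=-1), 1.0)
    # derivatives of a constant vanish: the vertex axis sums to 0
    assert np.allclose(shape.innerFaceShapeFunctionDerivatives.sum(axis=-2), 0.0)

for _shape in (Triangle, Quadrilateral, Tetrahedron):
    _check_partition_of_unity(_shape)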
class Hexahedron:
dimension = 3
numberOfInnerFaces = 12
numberOfFacets = 6
subelementTransformedVolumes = np.array([1.0/8.0, 1.0/8.0, 1.0/8.0, 1.0/8.0, 1.0/8.0, 1.0/8.0, 1.0/8.0, 1.0/8.0])
innerFaceShapeFunctionValues = np.array([[9.0/32.0, 9.0/32.0, 3.0/32.0, 3.0/32.0, 3.0/32.0, 3.0/32.0, 1.0/32.0, 1.0/32.0], [3.0/32.0, 9.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0, 3.0/32.0, 1.0/32.0], [3.0/32.0, 3.0/32.0, 9.0/32.0, 9.0/32.0, 1.0/32.0, 1.0/32.0, 3.0/32.0, 3.0/32.0], [9.0/32.0, 3.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0, 1.0/32.0, 3.0/32.0], [3.0/32.0, 3.0/32.0, 1.0/32.0, 1.0/32.0, 9.0/32.0, 9.0/32.0, 3.0/32.0, 3.0/32.0], [1.0/32.0, 3.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0, 9.0/32.0, 3.0/32.0], [1.0/32.0, 1.0/32.0, 3.0/32.0, 3.0/32.0, 3.0/32.0, 3.0/32.0, 9.0/32.0, 9.0/32.0], [3.0/32.0, 1.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0, 3.0/32.0, 9.0/32.0], [9.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0], [3.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0], [1.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0], [3.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0, 3.0/32.0, 1.0/32.0, 3.0/32.0, 9.0/32.0]])
innerFaceShapeFunctionDerivatives = np.array([[[-9.0/16.0, -3.0/8.0, -3.0/8.0], [9.0/16.0, -3.0/8.0, -3.0/8.0], [3.0/16.0, 3.0/8.0, -1.0/8.0], [-3.0/16.0, 3.0/8.0, -1.0/8.0], [-3.0/16.0, -1.0/8.0, 3.0/8.0], [3.0/16.0, -1.0/8.0, 3.0/8.0], [1.0/16.0, 1.0/8.0, 1.0/8.0], [-1.0/16.0, 1.0/8.0, 1.0/8.0]], [[-3.0/8.0, -3.0/16.0, -1.0/8.0], [3.0/8.0, -9.0/16.0, -3.0/8.0], [3.0/8.0, 9.0/16.0, -3.0/8.0], [-3.0/8.0, 3.0/16.0, -1.0/8.0], [-1.0/8.0, -1.0/16.0, 1.0/8.0], [1.0/8.0, -3.0/16.0, 3.0/8.0], [1.0/8.0, 3.0/16.0, 3.0/8.0], [-1.0/8.0, 1.0/16.0, 1.0/8.0]], [[-3.0/16.0, -3.0/8.0, -1.0/8.0], [3.0/16.0, -3.0/8.0, -1.0/8.0], [9.0/16.0, 3.0/8.0, -3.0/8.0], [-9.0/16.0, 3.0/8.0, -3.0/8.0], [-1.0/16.0, -1.0/8.0, 1.0/8.0], [1.0/16.0, -1.0/8.0, 1.0/8.0], [3.0/16.0, 1.0/8.0, 3.0/8.0], [-3.0/16.0, 1.0/8.0, 3.0/8.0]], [[-3.0/8.0, -9.0/16.0, -3.0/8.0], [3.0/8.0, -3.0/16.0, -1.0/8.0], [3.0/8.0, 3.0/16.0, -1.0/8.0], [-3.0/8.0, 9.0/16.0, -3.0/8.0], [-1.0/8.0, -3.0/16.0, 3.0/8.0], [1.0/8.0, -1.0/16.0, 1.0/8.0], [1.0/8.0, 1.0/16.0, 1.0/8.0], [-1.0/8.0, 3.0/16.0, 3.0/8.0]], [[-3.0/16.0, -1.0/8.0, -3.0/8.0], [3.0/16.0, -1.0/8.0, -3.0/8.0], [1.0/16.0, 1.0/8.0, -1.0/8.0], [-1.0/16.0, 1.0/8.0, -1.0/8.0], [-9.0/16.0, -3.0/8.0, 3.0/8.0], [9.0/16.0, -3.0/8.0, 3.0/8.0], [3.0/16.0, 3.0/8.0, 1.0/8.0], [-3.0/16.0, 3.0/8.0, 1.0/8.0]], [[-1.0/8.0, -1.0/16.0, -1.0/8.0], [1.0/8.0, -3.0/16.0, -3.0/8.0], [1.0/8.0, 3.0/16.0, -3.0/8.0], [-1.0/8.0, 1.0/16.0, -1.0/8.0], [-3.0/8.0, -3.0/16.0, 1.0/8.0], [3.0/8.0, -9.0/16.0, 3.0/8.0], [3.0/8.0, 9.0/16.0, 3.0/8.0], [-3.0/8.0, 3.0/16.0, 1.0/8.0]], [[-1.0/16.0, -1.0/8.0, -1.0/8.0], [1.0/16.0, -1.0/8.0, -1.0/8.0], [3.0/16.0, 1.0/8.0, -3.0/8.0], [-3.0/16.0, 1.0/8.0, -3.0/8.0], [-3.0/16.0, -3.0/8.0, 1.0/8.0], [3.0/16.0, -3.0/8.0, 1.0/8.0], [9.0/16.0, 3.0/8.0, 3.0/8.0], [-9.0/16.0, 3.0/8.0, 3.0/8.0]], [[-1.0/8.0, -3.0/16.0, -3.0/8.0], [1.0/8.0, -1.0/16.0, -1.0/8.0], [1.0/8.0, 1.0/16.0, -1.0/8.0], [-1.0/8.0, 3.0/16.0, -3.0/8.0], [-3.0/8.0, -9.0/16.0, 3.0/8.0], [3.0/8.0, -3.0/16.0, 1.0/8.0], [3.0/8.0, 3.0/16.0, 1.0/8.0], [-3.0/8.0, 9.0/16.0, 3.0/8.0]], [[-3.0/8.0, -3.0/8.0, -9.0/16.0], [3.0/8.0, -1.0/8.0, -3.0/16.0], [1.0/8.0, 1.0/8.0, -1.0/16.0], [-1.0/8.0, 3.0/8.0, -3.0/16.0], [-3.0/8.0, -3.0/8.0, 9.0/16.0], [3.0/8.0, -1.0/8.0, 3.0/16.0], [1.0/8.0, 1.0/8.0, 1.0/16.0], [-1.0/8.0, 3.0/8.0, 3.0/16.0]], [[-3.0/8.0, -1.0/8.0, -3.0/16.0], [3.0/8.0, -3.0/8.0, -9.0/16.0], [1.0/8.0, 3.0/8.0, -3.0/16.0], [-1.0/8.0, 1.0/8.0, -1.0/16.0], [-3.0/8.0, -1.0/8.0, 3.0/16.0], [3.0/8.0, -3.0/8.0, 9.0/16.0], [1.0/8.0, 3.0/8.0, 3.0/16.0], [-1.0/8.0, 1.0/8.0, 1.0/16.0]], [[-1.0/8.0, -1.0/8.0, -1.0/16.0], [1.0/8.0, -3.0/8.0, -3.0/16.0], [3.0/8.0, 3.0/8.0, -9.0/16.0], [-3.0/8.0, 1.0/8.0, -3.0/16.0], [-1.0/8.0, -1.0/8.0, 1.0/16.0], [1.0/8.0, -3.0/8.0, 3.0/16.0], [3.0/8.0, 3.0/8.0, 9.0/16.0], [-3.0/8.0, 1.0/8.0, 3.0/16.0]], [[-1.0/8.0, -3.0/8.0, -3.0/16.0], [1.0/8.0, -1.0/8.0, -1.0/16.0], [3.0/8.0, 1.0/8.0, -3.0/16.0], [-3.0/8.0, 3.0/8.0, -9.0/16.0], [-1.0/8.0, -3.0/8.0, 3.0/16.0], [1.0/8.0, -1.0/8.0, 1.0/16.0], [3.0/8.0, 1.0/8.0, 3.0/16.0], [-3.0/8.0, 3.0/8.0, 9.0/16.0]]])
innerFaceNeighborVertices = np.array([[0, 1, 4, 5, 2, 3], [1, 2, 5, 6, 3, 0], [2, 3, 6, 7, 0, 1], [3, 0, 7, 4, 1, 2], [4, 5, 6, 7, 0, 1], [5, 6, 7, 4, 1, 2], [6, 7, 4, 5, 2, 3], [7, 4, 5, 6, 3, 0], [4, 0, 1, 5, 3, 7], [5, 1, 2, 6, 4, 0], [6, 2, 3, 7, 5, 1], [7, 3, 4, 0, 6, 2]])
subelementShapeFunctionValues = np.array([[27.0/64.0, 9.0/64.0, 3.0/64.0, 9.0/64.0, 9.0/64.0, 3.0/64.0, 1.0/64.0, 3.0/64.0], [9.0/64.0, 27.0/64.0, 9.0/64.0, 3.0/64.0, 3.0/64.0, 9.0/64.0, 3.0/64.0, 1.0/64.0], [3.0/64.0, 9.0/64.0, 27.0/64.0, 9.0/64.0, 1.0/64.0, 3.0/64.0, 9.0/64.0, 3.0/64.0], [9.0/64.0, 3.0/64.0, 9.0/64.0, 27.0/64.0, 3.0/64.0, 1.0/64.0, 3.0/64.0, 9.0/64.0], [9.0/64.0, 3.0/64.0, 1.0/64.0, 3.0/64.0, 27.0/64.0, 9.0/64.0, 3.0/64.0, 9.0/64.0], [3.0/64.0, 9.0/64.0, 3.0/64.0, 1.0/64.0, 9.0/64.0, 27.0/64.0, 9.0/64.0, 3.0/64.0], [1.0/64.0, 3.0/64.0, 9.0/64.0, 3.0/64.0, 3.0/64.0, 9.0/64.0, 27.0/64.0, 9.0/64.0], [3.0/64.0, 1.0/64.0, 3.0/64.0, 9.0/64.0, 9.0/64.0, 3.0/64.0, 9.0/64.0, 27.0/64.0]])
subelementShapeFunctionDerivatives = np.array([[[-9.0/16.0, -9.0/16.0, -9.0/16.0], [9.0/16.0, -3.0/16.0, -3.0/16.0], [3.0/16.0, 3.0/16.0, -1.0/16.0], [-3.0/16.0, 9.0/16.0, -3.0/16.0], [-3.0/16.0, -3.0/16.0, 9.0/16.0], [3.0/16.0, -1.0/16.0, 3.0/16.0], [1.0/16.0, 1.0/16.0, 1.0/16.0], [-1.0/16.0, 3.0/16.0, 3.0/16.0]], [[-9.0/16.0, -3.0/16.0, -3.0/16.0], [9.0/16.0, -9.0/16.0, -9.0/16.0], [3.0/16.0, 9.0/16.0, -3.0/16.0], [-3.0/16.0, 3.0/16.0, -1.0/16.0], [-3.0/16.0, -1.0/16.0, 3.0/16.0], [3.0/16.0, -3.0/16.0, 9.0/16.0], [1.0/16.0, 3.0/16.0, 3.0/16.0], [-1.0/16.0, 1.0/16.0, 1.0/16.0]], [[-3.0/16.0, -3.0/16.0, -1.0/16.0], [3.0/16.0, -9.0/16.0, -3.0/16.0], [9.0/16.0, 9.0/16.0, -9.0/16.0], [-9.0/16.0, 3.0/16.0, -3.0/16.0], [-1.0/16.0, -1.0/16.0, 1.0/16.0], [1.0/16.0, -3.0/16.0, 3.0/16.0], [3.0/16.0, 3.0/16.0, 9.0/16.0], [-3.0/16.0, 1.0/16.0, 3.0/16.0]], [[-3.0/16.0, -9.0/16.0, -3.0/16.0], [3.0/16.0, -3.0/16.0, -1.0/16.0], [9.0/16.0, 3.0/16.0, -3.0/16.0], [-9.0/16.0, 9.0/16.0, -9.0/16.0], [-1.0/16.0, -3.0/16.0, 3.0/16.0], [1.0/16.0, -1.0/16.0, 1.0/16.0], [3.0/16.0, 1.0/16.0, 3.0/16.0], [-3.0/16.0, 3.0/16.0, 9.0/16.0]], [[-3.0/16.0, -3.0/16.0, -9.0/16.0], [3.0/16.0, -1.0/16.0, -3.0/16.0], [1.0/16.0, 1.0/16.0, -1.0/16.0], [-1.0/16.0, 3.0/16.0, -3.0/16.0], [-9.0/16.0, -9.0/16.0, 9.0/16.0], [9.0/16.0, -3.0/16.0, 3.0/16.0], [3.0/16.0, 3.0/16.0, 1.0/16.0], [-3.0/16.0, 9.0/16.0, 3.0/16.0]], [[-3.0/16.0, -1.0/16.0, -3.0/16.0], [3.0/16.0, -3.0/16.0, -9.0/16.0], [1.0/16.0, 3.0/16.0, -3.0/16.0], [-1.0/16.0, 1.0/16.0, -1.0/16.0], [-9.0/16.0, -3.0/16.0, 3.0/16.0], [9.0/16.0, -9.0/16.0, 9.0/16.0], [3.0/16.0, 9.0/16.0, 3.0/16.0], [-3.0/16.0, 3.0/16.0, 1.0/16.0]], [[-1.0/16.0, -1.0/16.0, -1.0/16.0], [1.0/16.0, -3.0/16.0, -3.0/16.0], [3.0/16.0, 3.0/16.0, -9.0/16.0], [-3.0/16.0, 1.0/16.0, -3.0/16.0], [-3.0/16.0, -3.0/16.0, 1.0/16.0], [3.0/16.0, -9.0/16.0, 3.0/16.0], [9.0/16.0, 9.0/16.0, 9.0/16.0], [-9.0/16.0, 3.0/16.0, 3.0/16.0]], [[-1.0/16.0, -3.0/16.0, -3.0/16.0], [1.0/16.0, -1.0/16.0, -1.0/16.0], [3.0/16.0, 1.0/16.0, -3.0/16.0], [-3.0/16.0, 3.0/16.0, -9.0/16.0], [-3.0/16.0, -9.0/16.0, 3.0/16.0], [3.0/16.0, -3.0/16.0, 1.0/16.0], [9.0/16.0, 3.0/16.0, 3.0/16.0], [-9.0/16.0, 9.0/16.0, 9.0/16.0]]])
facetVerticesIndexes = np.array([[0, 3, 2, 1], [0, 4, 7, 3], [0, 1, 5, 4], [4, 5, 6, 7], [1, 2, 6, 5], [2, 3, 7, 6]])
outerFaceShapeFunctionValues = np.array([[[9.0/16.0, 3.0/16.0, 1.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0], [3.0/16.0, 1.0/16.0, 3.0/16.0, 9.0/16.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0], [1.0/16.0, 3.0/16.0, 9.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0], [3.0/16.0, 9.0/16.0, 3.0/16.0, 1.0/16.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0]], [[9.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 1.0/16.0], [3.0/16.0, 0.0/1.0, 0.0/1.0, 1.0/16.0, 9.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0], [1.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 9.0/16.0], [3.0/16.0, 0.0/1.0, 0.0/1.0, 9.0/16.0, 1.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0]], [[9.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 1.0/16.0, 0.0/1.0, 0.0/1.0], [3.0/16.0, 9.0/16.0, 0.0/1.0, 0.0/1.0, 1.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0], [1.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 9.0/16.0, 0.0/1.0, 0.0/1.0], [3.0/16.0, 1.0/16.0, 0.0/1.0, 0.0/1.0, 9.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0]], [[0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 9.0/16.0, 3.0/16.0, 1.0/16.0, 3.0/16.0], [0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 9.0/16.0, 3.0/16.0, 1.0/16.0], [0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 1.0/16.0, 3.0/16.0, 9.0/16.0, 3.0/16.0], [0.0/1.0, 0.0/1.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 1.0/16.0, 3.0/16.0, 9.0/16.0]], [[0.0/1.0, 9.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 1.0/16.0, 0.0/1.0], [0.0/1.0, 3.0/16.0, 9.0/16.0, 0.0/1.0, 0.0/1.0, 1.0/16.0, 3.0/16.0, 0.0/1.0], [0.0/1.0, 1.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 9.0/16.0, 0.0/1.0], [0.0/1.0, 3.0/16.0, 1.0/16.0, 0.0/1.0, 0.0/1.0, 9.0/16.0, 3.0/16.0, 0.0/1.0]], [[0.0/1.0, 0.0/1.0, 9.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 1.0/16.0], [0.0/1.0, 0.0/1.0, 3.0/16.0, 9.0/16.0, 0.0/1.0, 0.0/1.0, 1.0/16.0, 3.0/16.0], [0.0/1.0, 0.0/1.0, 1.0/16.0, 3.0/16.0, 0.0/1.0, 0.0/1.0, 3.0/16.0, 9.0/16.0], [0.0/1.0, 0.0/1.0, 3.0/16.0, 1.0/16.0, 0.0/1.0, 0.0/1.0, 9.0/16.0, 3.0/16.0]]])
@staticmethod
    def _is(elem):
        return len(elem.vertices) == 8
@staticmethod
def getInnerFaceAreaVector(local, elementCentroid, elementVertices):
b = elementVertices[ (Hexahedron.innerFaceNeighborVertices[local])[0] ]
f = elementVertices[ (Hexahedron.innerFaceNeighborVertices[local])[1] ]
q = elementVertices[ (Hexahedron.innerFaceNeighborVertices[local])[2] ]
w = elementVertices[ (Hexahedron.innerFaceNeighborVertices[local])[3] ]
e = elementVertices[ (Hexahedron.innerFaceNeighborVertices[local])[4] ]
r = elementVertices[ (Hexahedron.innerFaceNeighborVertices[local])[5] ]
# Element centroid
x0, y0, z0 = elementCentroid.getCoordinates()
# Facet [f-b-q-w] centroid
x1, y1, z1 = ( (b + f + q + w) / 4.0 ).getCoordinates()
# Edge [f-b] midpoint
x2, y2, z2 = ( (b + f) / 2.0 ).getCoordinates()
# Facet [f-b-e-r] centroid
x3, y3, z3 = ( (b + f + e + r) / 4.0 ).getCoordinates()
# Face area vector components
x = 0.5 * ((y1-y0)*(z3-z0) - (y3-y0)*(z1-z0) + (y3-y2)*(z1-z2) - (y1-y2)*(z3-z2))
y = 0.5 * ((x3-x0)*(z1-z0) - (x1-x0)*(z3-z0) + (x1-x2)*(z3-z2) - (x3-x2)*(z1-z2))
z = 0.5 * ((x1-x0)*(y3-y0) - (x3-x0)*(y1-y0) + (x3-x2)*(y1-y2) - (x1-x2)*(y3-y2))
return Point(x, y, z)
class Prism:
dimension = 3
numberOfInnerFaces = 9
numberOfFacets = 5
subelementTransformedVolumes = np.array([1.0/12.0, 1.0/12.0, 1.0/12.0, 1.0/12.0, 1.0/12.0, 1.0/12.0])
innerFaceShapeFunctionValues = np.array([[5.0/16.0, 5.0/16.0, 1.0/8.0, 5.0/48.0, 5.0/48.0, 1.0/24.0], [1.0/8.0, 5.0/16.0, 5.0/16.0, 1.0/24.0, 5.0/48.0, 5.0/48.0], [5.0/16.0, 1.0/8.0, 5.0/16.0, 5.0/48.0, 1.0/24.0, 5.0/48.0], [5.0/48.0, 5.0/48.0, 1.0/24.0, 5.0/16.0, 5.0/16.0, 1.0/8.0], [1.0/24.0, 5.0/48.0, 5.0/48.0, 1.0/8.0, 5.0/16.0, 5.0/16.0], [5.0/48.0, 1.0/24.0, 5.0/48.0, 5.0/16.0, 1.0/8.0, 5.0/16.0], [7.0/24.0, 5.0/48.0, 5.0/48.0, 7.0/24.0, 5.0/48.0, 5.0/48.0], [5.0/48.0, 7.0/24.0, 5.0/48.0, 5.0/48.0, 7.0/24.0, 5.0/48.0], [5.0/48.0, 5.0/48.0, 7.0/24.0, 5.0/48.0, 5.0/48.0, 7.0/24.0]])
innerFaceShapeFunctionDerivatives = np.array([[[-3.0/4.0, -3.0/4.0, -5.0/12.0], [3.0/4.0, 0.0/1.0, -5.0/12.0], [0.0/1.0, 3.0/4.0, -1.0/6.0], [-1.0/4.0, -1.0/4.0, 5.0/12.0], [1.0/4.0, 0.0/1.0, 5.0/12.0], [0.0/1.0, 1.0/4.0, 1.0/6.0]], [[-3.0/4.0, -3.0/4.0, -1.0/6.0], [3.0/4.0, 0.0/1.0, -5.0/12.0], [0.0/1.0, 3.0/4.0, -5.0/12.0], [-1.0/4.0, -1.0/4.0, 1.0/6.0], [1.0/4.0, 0.0/1.0, 5.0/12.0], [0.0/1.0, 1.0/4.0, 5.0/12.0]], [[-3.0/4.0, -3.0/4.0, -5.0/12.0], [3.0/4.0, 0.0/1.0, -1.0/6.0], [0.0/1.0, 3.0/4.0, -5.0/12.0], [-1.0/4.0, -1.0/4.0, 5.0/12.0], [1.0/4.0, 0.0/1.0, 1.0/6.0], [0.0/1.0, 1.0/4.0, 5.0/12.0]], [[-1.0/4.0, -1.0/4.0, -5.0/12.0], [1.0/4.0, 0.0/1.0, -5.0/12.0], [0.0/1.0, 1.0/4.0, -1.0/6.0], [-3.0/4.0, -3.0/4.0, 5.0/12.0], [3.0/4.0, 0.0/1.0, 5.0/12.0], [0.0/1.0, 3.0/4.0, 1.0/6.0]], [[-1.0/4.0, -1.0/4.0, -1.0/6.0], [1.0/4.0, 0.0/1.0, -5.0/12.0], [0.0/1.0, 1.0/4.0, -5.0/12.0], [-3.0/4.0, -3.0/4.0, 1.0/6.0], [3.0/4.0, 0.0/1.0, 5.0/12.0], [0.0/1.0, 3.0/4.0, 5.0/12.0]], [[-1.0/4.0, -1.0/4.0, -5.0/12.0], [1.0/4.0, 0.0/1.0, -1.0/6.0], [0.0/1.0, 1.0/4.0, -5.0/12.0], [-3.0/4.0, -3.0/4.0, 5.0/12.0], [3.0/4.0, 0.0/1.0, 1.0/6.0], [0.0/1.0, 3.0/4.0, 5.0/12.0]], [[-1.0/2.0, -1.0/2.0, -7.0/12.0], [1.0/2.0, 0.0/1.0, -5.0/24.0], [0.0/1.0, 1.0/2.0, -5.0/24.0], [-1.0/2.0, -1.0/2.0, 7.0/12.0], [1.0/2.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 1.0/2.0, 5.0/24.0]], [[-1.0/2.0, -1.0/2.0, -5.0/24.0], [1.0/2.0, 0.0/1.0, -7.0/12.0], [0.0/1.0, 1.0/2.0, -5.0/24.0], [-1.0/2.0, -1.0/2.0, 5.0/24.0], [1.0/2.0, 0.0/1.0, 7.0/12.0], [0.0/1.0, 1.0/2.0, 5.0/24.0]], [[-1.0/2.0, -1.0/2.0, -5.0/24.0], [1.0/2.0, 0.0/1.0, -5.0/24.0], [0.0/1.0, 1.0/2.0, -7.0/12.0], [-1.0/2.0, -1.0/2.0, 5.0/24.0], [1.0/2.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 1.0/2.0, 7.0/12.0]]])
innerFaceNeighborVertices = np.array([[0, 1, 3, 4, 2, 0], [1, 2, 4, 5, 0, 0], [2, 0, 5, 3, 1, 0], [4, 3, 0, 1, 5, 0], [5, 4, 1, 2, 3, 0], [3, 5, 2, 0, 4, 0], [3, 0, 1, 4, 2, 5], [4, 1, 2, 5, 3, 0], [5, 2, 3, 0, 4, 1]])
subelementShapeFunctionValues = np.array([[7.0/16.0, 5.0/32.0, 5.0/32.0, 7.0/48.0, 5.0/96.0, 5.0/96.0], [5.0/32.0, 7.0/16.0, 5.0/32.0, 5.0/96.0, 7.0/48.0, 5.0/96.0], [5.0/32.0, 5.0/32.0, 7.0/16.0, 5.0/96.0, 5.0/96.0, 7.0/48.0], [7.0/48.0, 5.0/96.0, 5.0/96.0, 7.0/16.0, 5.0/32.0, 5.0/32.0], [5.0/96.0, 7.0/48.0, 5.0/96.0, 5.0/32.0, 7.0/16.0, 5.0/32.0], [5.0/96.0, 5.0/96.0, 7.0/48.0, 5.0/32.0, 5.0/32.0, 7.0/16.0]])
subelementShapeFunctionDerivatives = np.array([[[-3.0/4.0, -3.0/4.0, -7.0/12.0], [3.0/4.0, 0.0/1.0, -5.0/24.0], [0.0/1.0, 3.0/4.0, -5.0/24.0], [-1.0/4.0, -1.0/4.0, 7.0/12.0], [1.0/4.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 1.0/4.0, 5.0/24.0]], [[-3.0/4.0, -3.0/4.0, -5.0/24.0], [3.0/4.0, 0.0/1.0, -7.0/12.0], [0.0/1.0, 3.0/4.0, -5.0/24.0], [-1.0/4.0, -1.0/4.0, 5.0/24.0], [1.0/4.0, 0.0/1.0, 7.0/12.0], [0.0/1.0, 1.0/4.0, 5.0/24.0]], [[-3.0/4.0, -3.0/4.0, -5.0/24.0], [3.0/4.0, 0.0/1.0, -5.0/24.0], [0.0/1.0, 3.0/4.0, -7.0/12.0], [-1.0/4.0, -1.0/4.0, 5.0/24.0], [1.0/4.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 1.0/4.0, 7.0/12.0]], [[-1.0/4.0, -1.0/4.0, -7.0/12.0], [1.0/4.0, 0.0/1.0, -5.0/24.0], [0.0/1.0, 1.0/4.0, -5.0/24.0], [-3.0/4.0, -3.0/4.0, 7.0/12.0], [3.0/4.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 3.0/4.0, 5.0/24.0]], [[-1.0/4.0, -1.0/4.0, -5.0/24.0], [1.0/4.0, 0.0/1.0, -7.0/12.0], [0.0/1.0, 1.0/4.0, -5.0/24.0], [-3.0/4.0, -3.0/4.0, 5.0/24.0], [3.0/4.0, 0.0/1.0, 7.0/12.0], [0.0/1.0, 3.0/4.0, 5.0/24.0]], [[-1.0/4.0, -1.0/4.0, -5.0/24.0], [1.0/4.0, 0.0/1.0, -5.0/24.0], [0.0/1.0, 1.0/4.0, -7.0/12.0], [-3.0/4.0, -3.0/4.0, 5.0/24.0], [3.0/4.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 3.0/4.0, 7.0/12.0]]])
    facetVerticesIndexes = np.array([[0, 2, 1], [3, 4, 5], [0, 3, 5, 2], [0, 1, 4, 3], [1, 2, 5, 4]], dtype=object)
    outerFaceShapeFunctionValues = np.array([[[7.0/12.0, 5.0/24.0, 5.0/24.0, 0.0/1.0, 0.0/1.0, 0.0/1.0], [5.0/24.0, 5.0/24.0, 7.0/12.0, 0.0/1.0, 0.0/1.0, 0.0/1.0], [5.0/24.0, 7.0/12.0, 5.0/24.0, 0.0/1.0, 0.0/1.0, 0.0/1.0]], [[0.0/1.0, 0.0/1.0, 0.0/1.0, 7.0/12.0, 5.0/24.0, 5.0/24.0], [0.0/1.0, 0.0/1.0, 0.0/1.0, 5.0/24.0, 7.0/12.0, 5.0/24.0], [0.0/1.0, 0.0/1.0, 0.0/1.0, 5.0/24.0, 5.0/24.0, 7.0/12.0]], [[9.0/16.0, 0.0/1.0, 3.0/16.0, 3.0/16.0, 0.0/1.0, 1.0/16.0], [3.0/16.0, 0.0/1.0, 1.0/16.0, 9.0/16.0, 0.0/1.0, 3.0/16.0], [1.0/16.0, 0.0/1.0, 3.0/16.0, 3.0/16.0, 0.0/1.0, 9.0/16.0], [3.0/16.0, 0.0/1.0, 9.0/16.0, 1.0/16.0, 0.0/1.0, 3.0/16.0]], [[9.0/16.0, 3.0/16.0, 0.0/1.0, 3.0/16.0, 1.0/16.0, 0.0/1.0], [3.0/16.0, 9.0/16.0, 0.0/1.0, 1.0/16.0, 3.0/16.0, 0.0/1.0], [1.0/16.0, 3.0/16.0, 0.0/1.0, 3.0/16.0, 9.0/16.0, 0.0/1.0], [3.0/16.0, 1.0/16.0, 0.0/1.0, 9.0/16.0, 3.0/16.0, 0.0/1.0]], [[0.0/1.0, 9.0/16.0, 3.0/16.0, 0.0/1.0, 3.0/16.0, 1.0/16.0], [0.0/1.0, 3.0/16.0, 9.0/16.0, 0.0/1.0, 1.0/16.0, 3.0/16.0], [0.0/1.0, 1.0/16.0, 3.0/16.0, 0.0/1.0, 3.0/16.0, 9.0/16.0], [0.0/1.0, 3.0/16.0, 1.0/16.0, 0.0/1.0, 9.0/16.0, 3.0/16.0]]], dtype=object)
@staticmethod
    def _is(elem):
        return len(elem.vertices) == 6
@staticmethod
def getInnerFaceAreaVector(local, elementCentroid, elementVertices):
        # Neighbor vertices of this inner face
b = elementVertices[ (Prism.innerFaceNeighborVertices[local])[0] ]
f = elementVertices[ (Prism.innerFaceNeighborVertices[local])[1] ]
q = elementVertices[ (Prism.innerFaceNeighborVertices[local])[2] ]
w = elementVertices[ (Prism.innerFaceNeighborVertices[local])[3] ]
e = elementVertices[ (Prism.innerFaceNeighborVertices[local])[4] ]
r = elementVertices[ (Prism.innerFaceNeighborVertices[local])[5] ]
# Element centroid
x0, y0, z0 = elementCentroid.getCoordinates()
# Facet [f-b-q-w] centroid
x1, y1, z1 = ( (b + f + q + w)/4.0 ).getCoordinates()
# Edge [f-b] midpoint
x2, y2, z2 = ( (b + f)/2.0 ).getCoordinates()
# Facet [f-b-e] or [f-b-e-r] centroid
if local < 6:
x3, y3, z3 = ( (b + f + e)/3.0 ).getCoordinates()
else:
x3, y3, z3 = ( (b + f + e + r)/4.0 ).getCoordinates()
# Face area vector components
x = 0.5 * ((y1-y0)*(z3-z0) - (y3-y0)*(z1-z0) + (y3-y2)*(z1-z2) - (y1-y2)*(z3-z2))
y = 0.5 * ((x3-x0)*(z1-z0) - (x1-x0)*(z3-z0) + (x1-x2)*(z3-z2) - (x3-x2)*(z1-z2))
z = 0.5 * ((x1-x0)*(y3-y0) - (x3-x0)*(y1-y0) + (x3-x2)*(y1-y2) - (x1-x2)*(y3-y2))
return Point(x, y, z)
class Pyramid:
dimension = 3
numberOfInnerFaces = 8
numberOfFacets = 5
subelementTransformedVolumes = np.array([1.0/18.0, 1.0/18.0, 1.0/18.0, 1.0/18.0, 1.0/9.0])
innerFaceShapeFunctionValues = np.array([[13.0/36.0, 13.0/36.0, 1.0/12.0, 1.0/12.0, 1.0/9.0], [1.0/12.0, 13.0/36.0, 13.0/36.0, 1.0/12.0, 1.0/9.0], [1.0/12.0, 1.0/12.0, 13.0/36.0, 13.0/36.0, 1.0/9.0], [13.0/36.0, 1.0/12.0, 1.0/12.0, 13.0/36.0, 1.0/9.0], [6.0/17.0, 5.0/34.0, 25.0/408.0, 5.0/34.0, 7.0/24.0], [5.0/34.0, 6.0/17.0, 5.0/34.0, 25.0/408.0, 7.0/24.0], [25.0/408.0, 5.0/34.0, 6.0/17.0, 5.0/34.0, 7.0/24.0], [5.0/34.0, 25.0/408.0, 5.0/34.0, 6.0/17.0, 7.0/24.0]])
innerFaceShapeFunctionDerivatives = np.array([[[-13.0/16.0, -1.0/2.0, -1.0/4.0], [13.0/16.0, -1.0/2.0, -1.0/4.0], [3.0/16.0, 1.0/2.0, -1.0/4.0], [-3.0/16.0, 1.0/2.0, -1.0/4.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/2.0, -3.0/16.0, -1.0/4.0], [1.0/2.0, -13.0/16.0, -1.0/4.0], [1.0/2.0, 13.0/16.0, -1.0/4.0], [-1.0/2.0, 3.0/16.0, -1.0/4.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-3.0/16.0, -1.0/2.0, -1.0/4.0], [3.0/16.0, -1.0/2.0, -1.0/4.0], [13.0/16.0, 1.0/2.0, -1.0/4.0], [-13.0/16.0, 1.0/2.0, -1.0/4.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/2.0, -13.0/16.0, -1.0/4.0], [1.0/2.0, -3.0/16.0, -1.0/4.0], [1.0/2.0, 3.0/16.0, -1.0/4.0], [-1.0/2.0, 13.0/16.0, -1.0/4.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-12.0/17.0, -12.0/17.0, -60.0/289.0], [12.0/17.0, -5.0/17.0, -169.0/578.0], [5.0/17.0, 5.0/17.0, -60.0/289.0], [-5.0/17.0, 12.0/17.0, -169.0/578.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-12.0/17.0, -5.0/17.0, -169.0/578.0], [12.0/17.0, -12.0/17.0, -60.0/289.0], [5.0/17.0, 12.0/17.0, -169.0/578.0], [-5.0/17.0, 5.0/17.0, -60.0/289.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-5.0/17.0, -5.0/17.0, -60.0/289.0], [5.0/17.0, -12.0/17.0, -169.0/578.0], [12.0/17.0, 12.0/17.0, -60.0/289.0], [-12.0/17.0, 5.0/17.0, -169.0/578.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-5.0/17.0, -12.0/17.0, -169.0/578.0], [5.0/17.0, -5.0/17.0, -60.0/289.0], [12.0/17.0, 5.0/17.0, -169.0/578.0], [-12.0/17.0, 12.0/17.0, -60.0/289.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]]])
    innerFaceNeighborVertices = np.array([[0, 1], [1, 2], [2, 3], [3, 0], [0, 4, 3, 1], [1, 4, 0, 2], [2, 4, 1, 3], [3, 4, 2, 0]], dtype=object)
subelementShapeFunctionValues = np.array([[75.0/164.0, 55.0/328.0, 121.0/1968.0, 55.0/328.0, 7.0/48.0], [55.0/328.0, 75.0/164.0, 55.0/328.0, 121.0/1968.0, 7.0/48.0], [121.0/1968.0, 55.0/328.0, 75.0/164.0, 55.0/328.0, 7.0/48.0], [55.0/328.0, 121.0/1968.0, 55.0/328.0, 75.0/164.0, 7.0/48.0], [11.0/96.0, 11.0/96.0, 11.0/96.0, 11.0/96.0, 13.0/24.0]])
subelementShapeFunctionDerivatives = np.array([[[-30.0/41.0, -30.0/41.0, -330.0/1681.0], [30.0/41.0, -11.0/41.0, -1021.0/3362.0], [11.0/41.0, 11.0/41.0, -330.0/1681.0], [-11.0/41.0, 30.0/41.0, -1021.0/3362.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-30.0/41.0, -11.0/41.0, -1021.0/3362.0], [30.0/41.0, -30.0/41.0, -330.0/1681.0], [11.0/41.0, 30.0/41.0, -1021.0/3362.0], [-11.0/41.0, 11.0/41.0, -330.0/1681.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-11.0/41.0, -11.0/41.0, -330.0/1681.0], [11.0/41.0, -30.0/41.0, -1021.0/3362.0], [30.0/41.0, 30.0/41.0, -330.0/1681.0], [-30.0/41.0, 11.0/41.0, -1021.0/3362.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-11.0/41.0, -30.0/41.0, -1021.0/3362.0], [11.0/41.0, -11.0/41.0, -330.0/1681.0], [30.0/41.0, 11.0/41.0, -1021.0/3362.0], [-30.0/41.0, 30.0/41.0, -330.0/1681.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]], [[-1.0/2.0, -1.0/2.0, -1.0/4.0], [1.0/2.0, -1.0/2.0, -1.0/4.0], [1.0/2.0, 1.0/2.0, -1.0/4.0], [-1.0/2.0, 1.0/2.0, -1.0/4.0], [0.0/1.0, 0.0/1.0, 1.0/1.0]]])
    facetVerticesIndexes = np.array([[0, 4, 3], [0, 1, 4], [1, 2, 4], [2, 3, 4], [0, 3, 2, 1]], dtype=object)
    outerFaceShapeFunctionValues = np.array([[[7.0/12.0, 0.0/1.0, 0.0/1.0, 5.0/24.0, 5.0/24.0], [5.0/24.0, 0.0/1.0, 0.0/1.0, 5.0/24.0, 7.0/12.0], [5.0/24.0, 0.0/1.0, 0.0/1.0, 7.0/12.0, 5.0/24.0]], [[7.0/12.0, 5.0/24.0, 0.0/1.0, 0.0/1.0, 5.0/24.0], [5.0/24.0, 7.0/12.0, 0.0/1.0, 0.0/1.0, 5.0/24.0], [5.0/24.0, 5.0/24.0, 0.0/1.0, 0.0/1.0, 7.0/12.0]], [[0.0/1.0, 7.0/12.0, 5.0/24.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 5.0/24.0, 7.0/12.0, 0.0/1.0, 5.0/24.0], [0.0/1.0, 5.0/24.0, 5.0/24.0, 0.0/1.0, 7.0/12.0]], [[0.0/1.0, 0.0/1.0, 7.0/12.0, 5.0/24.0, 5.0/24.0], [0.0/1.0, 0.0/1.0, 5.0/24.0, 7.0/12.0, 5.0/24.0], [0.0/1.0, 0.0/1.0, 5.0/24.0, 5.0/24.0, 7.0/12.0]], [[9.0/16.0, 3.0/16.0, 1.0/16.0, 3.0/16.0, 0.0/1.0], [3.0/16.0, 1.0/16.0, 3.0/16.0, 9.0/16.0, 0.0/1.0], [1.0/16.0, 3.0/16.0, 9.0/16.0, 3.0/16.0, 0.0/1.0], [3.0/16.0, 9.0/16.0, 3.0/16.0, 1.0/16.0, 0.0/1.0]]], dtype=object)
@staticmethod
    def _is(elem):
        return len(elem.vertices) == 5
@staticmethod
def getInnerFaceAreaVector(local, elementCentroid, elementVertices):
        # Vertex indices
b = Pyramid.innerFaceNeighborVertices[local][0]
f = Pyramid.innerFaceNeighborVertices[local][1]
# Base centroid
x0 = 0.25 * (elementVertices[0].x + elementVertices[1].x + elementVertices[2].x + elementVertices[3].x)
y0 = 0.25 * (elementVertices[0].y + elementVertices[1].y + elementVertices[2].y + elementVertices[3].y)
z0 = 0.25 * (elementVertices[0].z + elementVertices[1].z + elementVertices[2].z + elementVertices[3].z)
# Edge {f-b} midpoint
x2 = 0.5 * (elementVertices[b].x + elementVertices[f].x)
y2 = 0.5 * (elementVertices[b].y + elementVertices[f].y)
z2 = 0.5 * (elementVertices[b].z + elementVertices[f].z)
if local < 4:
# Facet {f-b-4} centroid
x1 = (elementVertices[b].x + elementVertices[f].x + elementVertices[4].x) / 3.0
y1 = (elementVertices[b].y + elementVertices[f].y + elementVertices[4].y) / 3.0
z1 = (elementVertices[b].z + elementVertices[f].z + elementVertices[4].z) / 3.0
# Face area vector components
x = 0.5 * ((y1-y0)*(z2-z0) - (y2-y0)*(z1-z0))
y = 0.5 * ((x2-x0)*(z1-z0) - (x1-x0)*(z2-z0))
z = 0.5 * ((x1-x0)*(y2-y0) - (x2-x0)*(y1-y0))
else:
            # Auxiliary vertex indices
q = Pyramid.innerFaceNeighborVertices[local][2]
w = Pyramid.innerFaceNeighborVertices[local][3]
# Facet {f-b-q} centroid
x1 = (elementVertices[b].x + elementVertices[f].x + elementVertices[q].x) / 3.0
y1 = (elementVertices[b].y + elementVertices[f].y + elementVertices[q].y) / 3.0
z1 = (elementVertices[b].z + elementVertices[f].z + elementVertices[q].z) / 3.0
# Facet {f-b-w} centroid
x3 = (elementVertices[b].x + elementVertices[f].x + elementVertices[w].x) / 3.0
y3 = (elementVertices[b].y + elementVertices[f].y + elementVertices[w].y) / 3.0
z3 = (elementVertices[b].z + elementVertices[f].z + elementVertices[w].z) / 3.0
# Face area vector components
x = 0.5 * ((y1-y0)*(z3-z0) - (y3-y0)*(z1-z0) + (y3-y2)*(z1-z2) - (y1-y2)*(z3-z2))
y = 0.5 * ((x3-x0)*(z1-z0) - (x1-x0)*(z3-z0) + (x1-x2)*(z3-z2) - (x3-x2)*(z1-z2))
z = 0.5 * ((x1-x0)*(y3-y0) - (x3-x0)*(y1-y0) + (x3-x2)*(y1-y2) - (x1-x2)*(y3-y2))
return Point(x, y, z)
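# --- Usage sketch (not part of the original module): assumes the module's Point
# --- class supports +, /, and getCoordinates(), as the methods above already rely on.
if __name__ == "__main__":
    # Inner-face area vector 0 of the reference unit tetrahedron.
    vertices = [Point(0.0, 0.0, 0.0), Point(1.0, 0.0, 0.0), Point(0.0, 1.0, 0.0), Point(0.0, 0.0, 1.0)]
    centroid = (vertices[0] + vertices[1] + vertices[2] + vertices[3]) / 4.0
    areaVector = Tetrahedron.getInnerFaceAreaVector(0, centroid, vertices)
    print(areaVector.getCoordinates())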
[per-file statistics and quality-signal columns omitted]
robolib/networks/configurations.py | Obyoxar/RobolabStatistics | c2c73ae6bb4437bc0050c6e91b591593ac38b976 | MIT | Python | 5,909 bytes
from keras.optimizers import SGD, RMSprop
from keras.models import Sequential
from keras.layers import Dense, Dropout, Conv2D, Flatten, BatchNormalization, MaxPooling2D
class NetConfig:
    # Base interface: a configuration builds the base network for a given input
    # shape and supplies a matching optimizer for training.
def create_base(self, input_d):
pass
def get_input_dim(self, input_image_size, input_to_output_stride, insets):
pass
def new_optimizer(self):
pass
class ClassicConfig(NetConfig):
def __init__(self):
pass
def create_base(self, input_d):
print("Generating ClassicConfig")
seq = Sequential()
seq.add(Dense(200, activation='linear', input_shape=input_d))
seq.add(Dense(100, activation='linear'))
seq.add(Dropout(0.2))
seq.add(Dense(50, activation='linear'))
return seq
def get_input_dim(self, input_image_size, input_to_output_stride, insets):
return ((int(input_image_size[0] / input_to_output_stride) - insets[1] - insets[3]) *
(int(input_image_size[1] / input_to_output_stride) - insets[0] - insets[2]),)
def new_optimizer(self):
return RMSprop()
class ConvolutionalConfig(NetConfig):
def __init__(self):
pass
def create_base(self, input_d):
print("Generating ConvolutionalConfig")
seq = Sequential()
seq.add(Conv2D(filters=9, kernel_size=(2, 2), strides=(1, 1), activation='relu', input_shape=input_d))
seq.add(Flatten())
seq.add(Dense(200, activation='linear'))
seq.add(Dense(100, activation='linear'))
seq.add(Dropout(0.2))
seq.add(Dense(50, activation='linear'))
return seq
def get_input_dim(self, input_image_size, input_to_output_stride, insets):
return (int(input_image_size[0] / input_to_output_stride) - insets[1] - insets[3],
int(input_image_size[1] / input_to_output_stride) - insets[0] - insets[2], 1)
def new_optimizer(self):
return RMSprop()
class MultiConvConfig(NetConfig):
def __init__(self):
pass
def create_base(self, input_d):
print("Generating MultiConvConfig")
seq = Sequential()
seq.add(Conv2D(filters=4, kernel_size=(3, 3), strides=(1, 1), activation='relu', input_shape=input_d))
seq.add(BatchNormalization())
seq.add(Dropout(0.2))
seq.add(Conv2D(8, (3, 3), activation='relu'))
seq.add(BatchNormalization())
seq.add(Dropout(0.2))
seq.add(Conv2D(8, (3, 3), activation='relu'))
seq.add(BatchNormalization())
seq.add(Dropout(0.2))
seq.add(Flatten())
seq.add(Dense(500, activation='relu'))
seq.add(Dropout(0.2))
seq.add(Dense(500, activation='relu'))
seq.add(Dropout(0.2))
seq.add(Dense(50, activation='relu')) # Why nan in loss when this is increased?
return seq
def get_input_dim(self, input_image_size, input_to_output_stride, insets):
return (int(input_image_size[0] / input_to_output_stride) - insets[1] - insets[3],
int(input_image_size[1] / input_to_output_stride) - insets[0] - insets[2], 1)
def new_optimizer(self):
return RMSprop()
class VGG19ish(NetConfig):
def __init__(self):
pass
def create_base(self, input_d):
print("Generating VGG19ish")
seq = Sequential()
seq.add(Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2), padding='same',
activation='relu', input_shape=input_d, name="conv1"))
print("conv1 {0}".format(seq.output_shape))
seq.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same', name="pool1"))
print("pool1 {0}".format(seq.output_shape))
seq.add(BatchNormalization())
seq.add(Conv2D(filters=192, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu', name="conv2"))
print("conv2 {0}".format(seq.output_shape))
seq.add(BatchNormalization())
seq.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same', name="pool2"))
print("pool2 {0}".format(seq.output_shape))
seq.add(Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu', name="conv3"))
print("conv3 {0}".format(seq.output_shape))
seq.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same', name="pool3"))
print("pool3 {0}".format(seq.output_shape))
seq.add(Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu', name="conv4")) # 4
print("conv4 {0}".format(seq.output_shape))
seq.add(Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu', name="conv5")) # 5
print("conv5 {0}".format(seq.output_shape))
seq.add(Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='same', activation='relu', name="conv6")) # 6
print("conv6 {0}".format(seq.output_shape))
seq.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same', name="pool4"))
print("pool4 {0}".format(seq.output_shape))
seq.add(Flatten(input_shape=(3, 4, 256), name='concat'))
print("concat {0}".format(seq.output_shape))
seq.add(Dense(4096, activation='relu', name="fc1"))
print("fc1 {0}".format(seq.output_shape))
seq.add(Dense(4096, activation='relu', name="fc2"))
print("fc2 {0}".format(seq.output_shape))
seq.add(Dense(128, activation='relu', name="fc3"))
print("fc3 {0}".format(seq.output_shape))
return seq
def get_input_dim(self, input_image_size, input_to_output_stride, insets):
return (int(input_image_size[0] / input_to_output_stride) - insets[1] - insets[3],
int(input_image_size[1] / input_to_output_stride) - insets[0] - insets[2], 1)
def new_optimizer(self):
return SGD() # LR = 0.01
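# --- Usage sketch (not from the original file; the image size, stride, and insets
# --- are assumed values). Builds and compiles one of the base networks above.
if __name__ == "__main__":
    config = ConvolutionalConfig()
    input_d = config.get_input_dim((64, 64), 2, (0, 0, 0, 0))  # -> (32, 32, 1)
    base = config.create_base(input_d)
    base.compile(optimizer=config.new_optimizer(), loss='mean_squared_error')
    base.summary()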
[per-file statistics and quality-signal columns omitted]
python-utils/test/test_byte.py | wgy1109/python-utils | 6c041f99dc5927f1e620d157d3844278afcfec08 | MIT | Python | 2,852 bytes
from kafka import KafkaConsumer
#b'\x02\x18001704107970\x02\x08null\x02@A4026FD47174E543EA41C538A8B430C6\x02\x16X7mOenlFCE4\x02\x1a1571118663679\x02\x00\x02\x12115930382\x02\x1038949603\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x0e1000505\x02\x022\x02\x00\x02\x00\x02\x00\x02\xbe\x0b{"acc":"0","devType":"01001G01","affix_speed":"0.0","msgType":"56","subpck":"0","convertedLatitude":"38.95663134801293","wireless_signal_strength":"31","high_beam":"0","latitude":"38949603","retain":"0","msgId":"","gnss_num":"0","phoneNo":"001704107970","msgLen":"0","batholith":"2","wireless_signal_status":"3","alertflag":"0","encrypt":"0","protocolVersion":"0","sn":"0","direction":"0","timestamp":"1571118663679","height":"5","longitude":"115930382","loc_mileage":"34727.1","left_turn_signal":"0","sz":"1","srcmsgid":"512","gps_speed":"0.0","oilMass":"4","right_turn_signal":"0","msglen":"94","dipped_headlight":"0","srcsn":"353","convertedLongitude":"115.94260189033328","onOrOff":"0","time":"1571118661000","status":"2148270080"}'
# print(b'\x02\x18001704107970\x02\x08null\x02@A4026FD47174E543EA41C538A8B430C6\x02\x16X7mOenlFCE4\x02\x1a1571118663679\x02\x00\x02\x12115930382\x02\x1038949603\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x0e1000505\x02\x022\x02\x00\x02\x00\x02\x00\x02\xbe\x0b{"acc":"0","devType":"01001G01","affix_speed":"0.0","msgType":"56","subpck":"0","convertedLatitude":"38.95663134801293","wireless_signal_strength":"31","high_beam":"0","latitude":"38949603","retain":"0","msgId":"","gnss_num":"0","phoneNo":"001704107970","msgLen":"0","batholith":"2","wireless_signal_status":"3","alertflag":"0","encrypt":"0","protocolVersion":"0","sn":"0","direction":"0","timestamp":"1571118663679","height":"5","longitude":"115930382","loc_mileage":"34727.1","left_turn_signal":"0","sz":"1","srcmsgid":"512","gps_speed":"0.0","oilMass":"4","right_turn_signal":"0","msglen":"94","dipped_headlight":"0","srcsn":"353","convertedLongitude":"115.94260189033328","onOrOff":"0","time":"1571118661000","status":"2148270080"}'.decode('utf-8', errors='ignore'))
# print(b'\x02\x18001704107970\x02\x08null\x02@A4026FD47174E543EA41C538A8B430C6\x02\x16X7mOenlFCE4\x02\x1a1571118663679\x02\x00\x02\x12115930382\x02\x1038949603\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x0e1000505\x02\x022\x02\x00\x02\x00\x02\x00\x02\xbe\x0b'.decode('utf-8', errors='ignore'))
consumer = KafkaConsumer('shadow_data_topic12',
group_id='group_sdt_java_save_pgsql',
bootstrap_servers=['192.168.1.6:9092',
'192.168.1.7:9092'])
for message in consumer:
    # Decode with errors='ignore': as the commented samples above show, payloads
    # mix raw framing bytes (e.g. b'\xbe') with UTF-8 JSON, so a strict decode can raise.
    print("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition,
                                         message.offset, message.key,
                                         message.value.decode('utf-8', errors='ignore')))
[per-file statistics and quality-signal columns omitted]
tests/stubs/models/domain/training_data_set.py | PSE-TECO-2020-TEAM1/e2e-ml_model-management | 665915c70c62af0c4eb4dc9eea49d4834646fad5 | MIT | Python | 538 bytes
from bson.objectid import ObjectId
from app.models.domain.training_data_set import TrainingDataSet
from tests.stubs.models.domain.feature_extraction_data import get_feature_extraction_data_stub_5_1, get_feature_extraction_data_stub_4_2
def get_training_data_set_stub():
return TrainingDataSet(
last_modified=1617981582111,
sample_list_file_ID=ObjectId("607070acc7559b9ccb3335fc"),
feature_extraction_cache={"5_1": get_feature_extraction_data_stub_5_1(), "4_2": get_feature_extraction_data_stub_4_2()}
)
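# --- Usage sketch (hypothetical test, not from the original file): the stub
# --- returns a TrainingDataSet whose feature-extraction cache is pre-populated.
def test_training_data_set_stub_contents():
    data_set = get_training_data_set_stub()
    assert data_set.last_modified == 1617981582111
    assert set(data_set.feature_extraction_cache) == {"5_1", "4_2"}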
[per-file statistics and quality-signal columns omitted]
dryadic/learning/stan/transcripts/stan_models.py | ohsu-comp-bio/dryads | 668d2492d0bd7a56d2b2037316aa964f01ef2f16 | MIT | Python | 2,588 bytes
base_model = '''
data {
int<lower=1> N; // number of samples
int<lower=1> T; // number of transcript features
int<lower=1> G; // number of genetic features
matrix[N, T] expr; // RNA-seq expression values
int<lower=0, upper=1> mut[N]; // mutation status
int<lower=1, upper=T> tx_indx[G]; // transcripts per gene
real<lower=0> alpha; // regularization coefficient
real<lower=0> gamma; // Dirichlet distribution prior
}
parameters {
real intercept;
vector[G] gn_wghts;
vector[T] gn_wghts_use;
vector<lower=0, upper=1>[T] tx_wghts;
}
model {
int pos = 1;
for (g in 1:G) {
vector[tx_indx[g]] pi = segment(tx_wghts, pos, tx_indx[g]);
vector[tx_indx[g]] tx_gn = segment(gn_wghts_use, pos, tx_indx[g]);
pi = pi / sum(pi);
for (t in 1:tx_indx[g]) {
tx_gn[t] = gn_wghts[g];
}
pos = pos + tx_indx[g];
}
intercept ~ normal(0, 1.0);
gn_wghts ~ normal(0, alpha);
target += exponential_lpdf(tx_wghts | gamma);
mut ~ bernoulli_logit(intercept + expr * (tx_wghts .* gn_wghts_use));
}
'''
# Identical to base_model except for a Cauchy (instead of normal) prior on the gene weights.
cauchy_model = '''
data {
int<lower=1> N; // number of samples
int<lower=1> T; // number of transcript features
int<lower=1> G; // number of genetic features
matrix[N, T] expr; // RNA-seq expression values
int<lower=0, upper=1> mut[N]; // mutation status
int<lower=1, upper=T> tx_indx[G]; // transcripts per gene
real<lower=0> alpha; // regularization coefficient
real<lower=0> gamma; // Dirichlet distribution prior
}
parameters {
real intercept;
vector[G] gn_wghts;
vector[T] gn_wghts_use;
vector<lower=0, upper=1>[T] tx_wghts;
}
model {
int pos = 1;
for (g in 1:G) {
vector[tx_indx[g]] pi = segment(tx_wghts, pos, tx_indx[g]);
vector[tx_indx[g]] tx_gn = segment(gn_wghts_use, pos, tx_indx[g]);
pi = pi / sum(pi);
for (t in 1:tx_indx[g]) {
tx_gn[t] = gn_wghts[g];
}
pos = pos + tx_indx[g];
}
intercept ~ normal(0, 1.0);
gn_wghts ~ cauchy(0, alpha);
target += exponential_lpdf(tx_wghts | gamma);
mut ~ bernoulli_logit(intercept + expr * (tx_wghts .* gn_wghts_use));
}
'''
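# --- Compilation sketch (not part of the original file; assumes the PyStan 2.x
# --- interface and toy data shaped to match the Stan data block above).
if __name__ == "__main__":
    import numpy as np
    import pystan
    stan_data = {
        'N': 3, 'T': 2, 'G': 1,          # 3 samples, 2 transcripts, 1 gene
        'expr': np.random.randn(3, 2),   # RNA-seq expression values
        'mut': [0, 1, 0],                # mutation status per sample
        'tx_indx': [2],                  # both transcripts belong to the single gene
        'alpha': 1.0, 'gamma': 1.0,      # prior hyperparameters
    }
    fit = pystan.StanModel(model_code=base_model).sampling(data=stan_data, iter=500, chains=2)
    print(fit)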
[per-file statistics and quality-signal columns omitted]
ckanext/zippreview/helpers.py | datagovau/ckanext-zippreview | 66e75c4df936ac807a4ebd92b7a151ddd3d91298 | Apache-2.0 | Python | 127 bytes
from ckanext.zippreview.utils import get_zip_tree
def get_helpers():
return {
'get_zip_tree': get_zip_tree
}
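# --- Wiring sketch (not from the original file): in a CKAN plugin this mapping is
# --- typically exposed through the ITemplateHelpers interface; the plugin class
# --- name here is hypothetical.
import ckan.plugins as plugins
from ckanext.zippreview import helpers

class ZipPreviewPlugin(plugins.SingletonPlugin):
    plugins.implements(plugins.ITemplateHelpers)

    def get_helpers(self):
        # CKAN collects template helpers (here, get_zip_tree) from this hook.
        return helpers.get_helpers()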
[per-file statistics and quality-signal columns omitted]
tests/cases/__init__.py | emosher/ozp-backend | 661353d0129a534dceb64a6af371e5ad37554b36 | Apache-2.0 | Python | 142 bytes
from .api_test_case import APITestCase
from .assertions_mixin import AssertionsMixin
from .model_assertions_mixin import ModelAssertionsMixin
[per-file statistics and quality-signal columns omitted]
test/test_check.py | siddht4/youtube_dl_custom | 6613df8249ad683944bb0ed1d3f8b99bb58cdb70 | Unlicense | Python | 62 bytes
import youtube_dl
print(youtube_dl)
print(youtube_dl.version)
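# --- Sketch (assumption about the standard youtube_dl package layout): the
# --- human-readable version string itself lives one attribute deeper.
print(youtube_dl.version.__version__)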
[per-file statistics and quality-signal columns omitted]
src/scripts/__init__.py | jokteur/BM-Segmenter | 66170ba4b1d30ac965dc183689df1ffd508f19fc | MIT | Python | 108 bytes
from .load_dicom import load_scan_from_dicom, get_pixels_hu
__all__ = ["load_scan_from_dicom", "get_pixels_hu"]
[per-file statistics and quality-signal columns omitted]
accounts/tests/test_api.py | CMU-TRP/podd-api | 663c17a8d59d6b56e82373530221b45385b2933c | MIT | Python | 60,818 bytes
# -*- encoding: utf-8 -*-
import datetime
import json
import urllib2
from django.conf import settings
from django.core.files import File
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.test.client import encode_multipart
from mock import patch, mock_open
from rest_framework.test import APITestCase
from accounts.models import UserDevice, User, Authority, UserCode
from common import factory
from common.constants import (GROUP_WORKING_TYPE_ALERT_REPORT_ADMINSTRATION_AREA,
GROUP_WORKING_TYPE_ALERT_REPORT_REPORT_TYPE, USER_STATUS_VOLUNTEER,
USER_STATUS_PODD, USER_STATUS_LIVESTOCK, USER_STATUS_PUBLIC_HEALTH, USER_STATUS_ADDITION_VOLUNTEER)
from reports.models import Report, ReportImage, ReportComment
def mock_upload_to_s3(file):
return 'http://2.bp.blogspot.com/-_NbC8XQ05jQ/UVly-ZzBK0I/AAAAAAAABtA/fETW0ixUnX0/s1600/image.jpg'
def mock_facebook_graph_get_object(self, id, **args):
return {
u'picture': {
u'data': {
u'url': u'https://fbcdn-profile-a.akamaihd.net/hprofile-ak-xap1/v/t1.0-1/p200x200/1469737_693160797374375_1503926674_n.jpg?oh=3f0222140bc6e623991454b0c1010175&oe=56C75B45&__gda__=1452604903_646f4de4341bdcf0124ab82dee6f3d52',
u'is_silhouette': False
}
},
u'id': u'603719628',
u'name': u'Taeyeon Kim',
u'email': u'taeyeon_kim@hotmail.com'
}
def get_temporary_file():
m = mock_open()
with patch('__main__.open', m, create=True):
temporary_file = open('/tmp/hello.world.jpg', 'w')
file = File(temporary_file)
file.write(urllib2.urlopen('http://www.yespetshop.com/private_folder/kitten-1.jpg').read())
        file.close()
    temporary_file.close()
return temporary_file
get_temporary_file()
class TestApiLogin(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user(password='password')
self.jessica = factory.create_user()
self.perm1 = factory.create_custom_permission()
self.perm2 = factory.create_custom_permission()
self.perm3 = factory.create_custom_permission()
def test_api_get_login(self):
response = self.client.get(reverse('obtain_auth_token'))
self.assertEqual(response.status_code, 405)
def test_api_post_login(self):
params = {
'username': self.taeyeon.username,
'password': 'password',
}
response = self.client.post(reverse('obtain_auth_token'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['username'], self.taeyeon.username)
self.assertEqual(response_json['firstName'], self.taeyeon.first_name)
self.assertEqual(response_json['lastName'], self.taeyeon.last_name)
self.assertEqual(response_json['status'], self.taeyeon.status)
self.assertEqual(response_json['authorityAdmins'], [])
self.assertEqual(response_json['isStaff'], False)
self.assertEqual(response_json['isSuperuser'], False)
self.assertTrue(response_json['token'])
def test_api_post_login_invalid(self):
params = {
'username': self.taeyeon.username,
'password': 'wrong',
}
response = self.client.post(reverse('obtain_auth_token'), params)
self.assertEqual(response.status_code, 400)
class TestApiConfiguration(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user()
self.jessica = factory.create_user()
self.group_a = factory.add_user_to_new_group_type_administration_area(user=self.taeyeon)
self.group_r = factory.add_user_to_new_group_type_report_type(user=self.taeyeon)
self.group_a2 = factory.add_user_to_new_group(user=self.taeyeon,
type=GROUP_WORKING_TYPE_ALERT_REPORT_ADMINSTRATION_AREA)
self.group_r2 = factory.add_user_to_new_group(user=self.taeyeon,
type=GROUP_WORKING_TYPE_ALERT_REPORT_REPORT_TYPE)
self.type1 = factory.create_report_type()
self.type2 = factory.create_report_type()
self.type3 = factory.create_report_type()
factory.create_group_report_type(group=self.group_r, report_type=self.type1)
factory.create_group_report_type(group=self.group_r2, report_type=self.type3)
self.area1 = factory.create_administration_area()
self.area2 = factory.create_administration_area()
self.area3 = factory.create_administration_area()
factory.create_group_administration_area(group=self.group_a, administration_area=self.area1)
factory.create_group_administration_area(group=self.group_a2, administration_area=self.area1)
factory.create_configuration(key='awsSecretKey', value='SNSD4Ever')
factory.create_configuration(key='awsAccessKey', value='TJSpinDance')
def test_api_configuration(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'androidId': 'NEXUS-5',
'deviceId': 'AXcdEsddeR',
'brand': 'Samsung',
'model': 'Galaxy',
'wifiMac': 'AcsdE-Bcsads',
}
response = self.client.post(reverse('configuration'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['fullName'], self.taeyeon.get_full_name())
self.assertEqual(response_json['awsSecretKey'], 'SNSD4Ever')
self.assertEqual(response_json['awsAccessKey'], 'TJSpinDance')
self.assertEqual(len(response_json['administrationAreas']), 1)
self.assertEqual(response_json['administrationAreas'][0]['id'], self.area1.id)
self.assertEqual(response_json['administrationAreas'][0]['name'], self.area1.name)
self.assertEqual(response_json['administrationAreas'][0]['address'], self.area1.address)
self.assertEqual(response_json['administrationAreas'][0]['parentName'], self.area1.get_parent())
self.assertEqual(response_json['administrationAreas'][0]['isLeaf'], self.area1.is_leaf())
self.assertEqual(len(response_json['reportTypes']), 1)
self.assertEqual(response_json['reportTypes'][0]['id'], self.type1.id)
self.assertEqual(response_json['reportTypes'][0]['name'], self.type1.name)
self.assertEqual(response_json['reportTypes'][0]['version'], self.type1.version)
self.assertEqual(response_json['reportTypes'][0]['definition'], json.loads(self.type1.form_definition))
device = UserDevice.objects.latest('id')
self.assertEqual(device.user, self.taeyeon)
self.assertEqual(device.android_id, 'NEXUS-5')
self.assertEqual(device.device_id, 'AXcdEsddeR')
self.assertEqual(device.brand, 'Samsung')
self.assertEqual(device.model, 'Galaxy')
self.assertEqual(device.wifi_mac, 'AcsdE-Bcsads')
def test_api_configuration_will_return_area_including_descendants_area(self):
area1_1 = self.area1.add_child(name='Namsan', location=self.area1.location)
area1_1_1 = area1_1.add_child(name='Namsan Tower', location=self.area1.location)
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'androidId': 'NEXUS-5',
'deviceId': 'AXcdEsddeR',
'brand': 'Samsung',
'model': 'Galaxy',
'wifiMac': 'AcsdE-Bcsads',
}
response = self.client.post(reverse('configuration'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(len(response_json['administrationAreas']), 3)
self.assertEqual(response_json['administrationAreas'][0]['id'], self.area1.id)
self.assertEqual(response_json['administrationAreas'][0]['name'], self.area1.name)
self.assertEqual(response_json['administrationAreas'][0]['address'], self.area1.address)
self.assertEqual(response_json['administrationAreas'][1]['id'], area1_1.id)
self.assertEqual(response_json['administrationAreas'][1]['name'], area1_1.name)
self.assertEqual(response_json['administrationAreas'][1]['address'], area1_1.address)
self.assertEqual(response_json['administrationAreas'][2]['id'], area1_1_1.id)
self.assertEqual(response_json['administrationAreas'][2]['name'], area1_1_1.name)
self.assertEqual(response_json['administrationAreas'][2]['address'], area1_1_1.address)
def test_api_configuration_after_have_user_device_will_update_data(self):
self.test_api_configuration()
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'androidId': 'Xfsdefvds412xc4',
'deviceId': 'CsaCSIbaw21ce5',
'brand': 'LG',
'model': 'Curve',
'wifiMac': 'BT21:1234:2v4d:41xT',
}
response = self.client.post(reverse('configuration'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.latest('id')
self.assertEqual(device.user, self.taeyeon)
self.assertEqual(device.android_id, 'Xfsdefvds412xc4')
self.assertEqual(device.device_id, 'CsaCSIbaw21ce5')
self.assertEqual(device.brand, 'LG')
self.assertEqual(device.model, 'Curve')
self.assertEqual(device.wifi_mac, 'BT21:1234:2v4d:41xT')
def test_api_configuration_without_permission(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.jessica.auth_token.key)
params = {
'androidId': 'NEXUS-5',
'deviceId': 'AXcdEsddeR',
'brand': 'Samsung',
'model': 'Galaxy',
'wifiMac': 'AcsdE-Bcsads',
}
response = self.client.post(reverse('configuration'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['fullName'], self.jessica.get_full_name())
self.assertEqual(response_json['administrationAreas'], [])
self.assertEqual(response_json['reportTypes'], [])
def test_post_api_configuration_invalid(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.post(reverse('configuration'))
self.assertEqual(response.status_code, 400)
response_json = json.loads(response.content)
self.assertEqual(response_json['androidId'], ['This field is required.'])
self.assertEqual(response_json['deviceId'], ['This field is required.'])
self.assertEqual(response_json['brand'], ['This field is required.'])
self.assertEqual(response_json['model'], ['This field is required.'])
# self.assertEqual(response_json['wifiMac'], ['This field is required.'])
def test_anonymous_cannot_access_api_configuration(self):
response = self.client.post(reverse('configuration'))
self.assertEqual(response.status_code, 401)
class TestApiUserSearch(APITestCase):
def setUp(self):
call_command('clear_index', interactive=False, verbosity=0)
self.taeyeon = factory.create_user(username='taengu')
self.jessica = factory.create_user(username='maomao')
self.yoona = factory.create_user(username='maoyoong')
def test_api_report_search(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('users_search'))
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(len(response_json), 3)
user1 = response_json[0]
self.assertEqual(user1['id'], self.jessica.id)
self.assertEqual(user1['username'], self.jessica.username)
self.assertEqual(user1['firstName'], self.jessica.first_name)
self.assertEqual(user1['lastName'], self.jessica.last_name)
self.assertEqual(user1['fullName'], self.jessica.get_full_name())
user2 = response_json[1]
self.assertEqual(user2['id'], self.yoona.id)
self.assertEqual(user2['username'], self.yoona.username)
self.assertEqual(user2['firstName'], self.yoona.first_name)
self.assertEqual(user2['lastName'], self.yoona.last_name)
self.assertEqual(user2['fullName'], self.yoona.get_full_name())
user3 = response_json[2]
self.assertEqual(user3['id'], self.taeyeon.id)
self.assertEqual(user3['username'], self.taeyeon.username)
self.assertEqual(user3['firstName'], self.taeyeon.first_name)
self.assertEqual(user3['lastName'], self.taeyeon.last_name)
self.assertEqual(user3['fullName'], self.taeyeon.get_full_name())
def test_api_report_search_by_username(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('users_search'), {
'username': 'mao'
})
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(len(response_json), 2)
user1 = response_json[0]
self.assertEqual(user1['id'], self.jessica.id)
user2 = response_json[1]
self.assertEqual(user2['id'], self.yoona.id)
def test_api_report_search_by_username_case_insensitive(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('users_search'), {
'username': 'tAEng'
})
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(len(response_json), 1)
user1 = response_json[0]
self.assertEqual(user1['id'], self.taeyeon.id)
def test_anonymous_cannot_access_api_list_search(self):
response = self.client.get(reverse('users_search'))
self.assertEqual(response.status_code, 401)
class TestApiGCMRegistration(APITestCase):
def setUp(self):
call_command('clear_index', interactive=False, verbosity=0)
self.taeyeon = factory.create_user()
self.jessica = factory.create_user()
self.device1 = factory.create_user_device(user=self.taeyeon)
self.device2 = factory.create_user_device(user=self.jessica)
self.device1.gcm_reg_id = ''
self.device1.save()
self.yoona = factory.create_user()
def test_post_api_gcm_registration(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'gcmRegId': 'Akcp201',
}
response = self.client.post(reverse('gcm_registration'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(id=self.device1.id)
self.assertEqual(device.gcm_reg_id, 'Akcp201')
def test_post_api_gcm_registration_already_have_gcm_reg_id_will_replace_the_old_one(self):
self.device1.gcm_reg_id = 'SS-2904'
self.device1.save()
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'gcmRegId': 'Akcp201',
}
response = self.client.post(reverse('gcm_registration'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(id=self.device1.id)
self.assertEqual(device.gcm_reg_id, 'Akcp201')
def test_post_api_gcm_registration_with_same_gcm_reg_id_will_replace_delete_old_one(self):
self.device1.gcm_reg_id = 'SS-2904'
self.device1.save()
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.jessica.auth_token.key)
params = {
'gcmRegId': 'SS-2904',
}
response = self.client.post(reverse('gcm_registration'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(user=self.jessica)
self.assertEqual(device.gcm_reg_id, 'SS-2904')
with self.assertRaises(UserDevice.DoesNotExist):
UserDevice.objects.get(user=self.taeyeon)
def test_post_api_gcm_registration_invalid(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.post(reverse('gcm_registration'))
self.assertEqual(response.status_code, 400)
response_json = json.loads(response.content)
self.assertEqual(response_json['gcmRegId'], 'This field is required.')
def test_post_api_gcm_registration_with_user_that_does_not_have_device_will_error(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.yoona.auth_token.key)
params = {
'gcmRegId': 'Akcp201',
}
response = self.client.post(reverse('gcm_registration'), params)
self.assertEqual(response.status_code, 400)
response_json = json.loads(response.content)
self.assertEqual(response_json['detail'], 'This user does not register this device.')
def test_cannot_get_api_gcm_registration(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('gcm_registration'))
self.assertEqual(response.status_code, 405)
def test_anonymous_cannot_access_api_gcm_registration(self):
response = self.client.get(reverse('gcm_registration'))
self.assertEqual(response.status_code, 401)
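# Profile image upload. upload_to_s3 is patched with mock_upload_to_s3 so no real S3
# request is made, and open() is mocked so the file below never has to exist on disk.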
class TestApiProfileImageUpload(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user()
self.jessica = factory.create_user()
@patch('accounts.api.upload_to_s3', mock_upload_to_s3)
def test_post_profile_image_upload(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
m = mock_open()
# Patch the builtin open so the fake file path below never touches disk
# (assumes Python 3's 'builtins'; Python 2 would need '__builtin__.open').
with patch('builtins.open', m, create=True):
send_file = open('/tmp/hello.world.jpg', 'rb')  # binary mode for an image upload
params = {
'image': send_file,
}
content = encode_multipart('BoUnDaRyStRiNg', params)
content_type = 'multipart/form-data; boundary=BoUnDaRyStRiNg'
response = self.client.post(reverse('upload_image_profile'), content, content_type=content_type)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['username'], self.taeyeon.username)
self.assertEqual(response_json['avatarUrl'], 'http://2.bp.blogspot.com/-_NbC8XQ05jQ/UVly-ZzBK0I/AAAAAAAABtA/fETW0ixUnX0/s1600/image.jpg')
self.assertEqual(response_json['thumbnailAvatarUrl'], 'http://2.bp.blogspot.com/-_NbC8XQ05jQ/UVly-ZzBK0I/AAAAAAAABtA/fETW0ixUnX0/s1600/image.jpg')
def test_cannot_get_upload_profile_image(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('upload_image_profile'))
self.assertEqual(response.status_code, 405)
def test_anonymous_cannot_post_upload_profile_image(self):
response = self.client.post(reverse('upload_image_profile'))
self.assertEqual(response.status_code, 401)
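# 'ping' is a pure token-auth smoke test: 200 with a valid token, 401 with none or a corrupted one.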
class TestApiPing(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user()
def test_ping_success(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('ping'))
self.assertEqual(response.status_code, 200)
def test_ping_with_no_token(self):
response = self.client.get(reverse('ping'))
self.assertEqual(response.status_code, 401)
def test_ping_with_invalid_token(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key + '530')
response = self.client.get(reverse('ping'))
self.assertEqual(response.status_code, 401)
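# Profile reads and updates on 'user-profile'. Note the anonymity rule exercised below:
# an update that includes an email clears is_anonymous, one without leaves it set.
# Facebook connect goes through a mocked GraphAPI.get_object.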
class TestApiUserProfile(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user()
self.anonymous418 = factory.create_user(is_public=True, is_anonymous=True)
self.anonymous309 = factory.create_user(is_public=True, is_anonymous=True)
def test_access_user_profile(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('user-profile'))
self.assertEqual(response.status_code, 200)
def test_cannot_access_user_profile_with_no_token(self):
response = self.client.get(reverse('user-profile'))
self.assertEqual(response.status_code, 401)
def test_cannot_access_user_profile_with_invalid_token(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key + '418')
response = self.client.get(reverse('user-profile'))
self.assertEqual(response.status_code, 401)
def test_update_profile(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'firstName': 'Taeyeon',
'lastName': 'Kim',
'telephone': '6689345678',
'avatarUrl': 'http://placehold.it/300x300',
'thumbnailAvatarUrl': 'http://placehold.it/80x80'
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.taeyeon.id)
self.assertEqual(user.first_name, 'Taeyeon')
self.assertEqual(user.last_name, 'Kim')
self.assertEqual(user.telephone, '6689345678')
self.assertEqual(user.avatar_url, 'http://placehold.it/300x300')
self.assertEqual(user.thumbnail_avatar_url, 'http://placehold.it/80x80')
def test_update_profile_some_detail(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'firstName': 'Taeyeon Kim',
'telephone': '6689345688'
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.taeyeon.id)
self.assertEqual(user.first_name, 'Taeyeon Kim')
self.assertEqual(user.telephone, '6689345688')
def test_update_profile_with_email(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'firstName': 'Taeyeon Kim',
'telephone': '6689345688',
'email': 'taeyeon_ss@gmail.com'
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.taeyeon.id)
self.assertEqual(user.first_name, 'Taeyeon Kim')
self.assertEqual(user.telephone, '6689345688')
self.assertEqual(user.email, 'taeyeon_ss@gmail.com')
def test_update_is_anonymous_profile_with_email(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.anonymous418.auth_token.key)
params = {
'firstName': 'Jessica Jung',
'email': 'jessica@gmail.com'
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.anonymous418.id)
self.assertEqual(user.first_name, 'Jessica Jung')
self.assertEqual(user.email, 'jessica@gmail.com')
self.assertFalse(user.is_anonymous)
def test_update_is_anonymous_profile_without_email(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.anonymous418.auth_token.key)
params = {
'firstName': 'Jessica Jung'
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.anonymous418.id)
self.assertEqual(user.first_name, 'Jessica Jung')
self.assertTrue(user.is_anonymous)
@patch('facebook.GraphAPI.get_object', mock_facebook_graph_get_object)
def test_api_user_profile_facebook_connect(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.anonymous309.auth_token.key)
params = {
'facebook_access_token': 'CAACEdEose0cBAEPHQz2q46MV8a4m6Lg2',
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.anonymous309.id)
self.assertEqual(user.username, 'taeyeon_kim@hotmail.com')
self.assertEqual(user.email, 'taeyeon_kim@hotmail.com')
self.assertEqual(user.first_name, 'Taeyeon Kim')
self.assertEqual(user.avatar_url, 'https://fbcdn-profile-a.akamaihd.net/hprofile-ak-xap1/v/t1.0-1/p200x200/1469737_693160797374375_1503926674_n.jpg?oh=3f0222140bc6e623991454b0c1010175&oe=56C75B45&__gda__=1452604903_646f4de4341bdcf0124ab82dee6f3d52')
self.assertEqual(user.fbuid, '603719628')
self.assertFalse(user.is_anonymous)
@patch('facebook.GraphAPI.get_object', mock_facebook_graph_get_object)
def test_api_invalid_user_profile_facebook_connect_same_email(self):
self.taeyeon_ss = factory.create_user(is_public=True, email='taeyeon_kim@hotmail.com')
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.anonymous309.auth_token.key)
params = {
'facebook_access_token': 'CAACEdEose0cBAEPHQz2q46MV8a4m6Lg2',
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 400)
@patch('facebook.GraphAPI.get_object', mock_facebook_graph_get_object)
def test_api_invalid_user_profile_facebook_connect_facebook_id(self):
self.taeyeon.fbuid = 603719628
self.taeyeon.save()
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.anonymous309.auth_token.key)
params = {
'facebook_access_token': 'CAACEdEose0cBAEPHQz2q46MV8a4m6Lg2',
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 400)
@patch('facebook.GraphAPI.get_object', mock_facebook_graph_get_object)
def test_api_user_profile_facebook_connect_again(self):
self.anonymous309.fbuid = 603719628
self.anonymous309.save()
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.anonymous309.auth_token.key)
params = {
'facebook_access_token': 'CAACEdEose0cBAEPHQz2q46MV8a4m6Lg2',
}
response = self.client.post(reverse('user-profile'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.anonymous309.id)
self.assertEqual(user.username, self.anonymous309.username)
self.assertFalse(user.is_anonymous)
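# The password endpoint accepts only digit strings of length >= 4 and stores them
# verbatim in display_password (leading zeros preserved).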
class TestApiUserPassword(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user(status='VOLUNTEER')
def test_post_update_user_password(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = { 'password': '1234' }
response = self.client.post(reverse('user_update_password'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.taeyeon.id)
self.assertEqual(user.display_password, '1234')
def test_post_update_user_password_with_zero(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = { 'password': '01234' }
response = self.client.post(reverse('user_update_password'), params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(id=self.taeyeon.id)
self.assertEqual(user.display_password, '01234')
def test_post_update_user_password_with_len_less_than_4(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = { 'password': '123' }
response = self.client.post(reverse('user_update_password'), params)
response_json = json.loads(response.content)
self.assertEqual(response.status_code, 400)
self.assertEqual(response_json['password'], 'Invalid password. Please try again. Only Number[0-9] and length > 3 (eg. 1234)')
def test_post_update_user_password_with_wrong_type(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = { 'password': 'abcdef' }
response = self.client.post(reverse('user_update_password'), params)
response_json = json.loads(response.content)
self.assertEqual(response.status_code, 400)
self.assertEqual(response_json['password'], 'Invalid password. Please try again. Only Number[0-9] and length > 3 (eg. 1234)')
def test_post_update_user_password_with_no_input(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.post(reverse('user_update_password'))
response_json = json.loads(response.content)
self.assertEqual(response.status_code, 400)
self.assertEqual(response_json['password'], 'Invalid password. Please try again. Only Number[0-9] and length > 3 (eg. 1234)')
def test_cannot_access_user_password(self):
params = { 'password': '1234' }
response = self.client.post(reverse('user_update_password'), params)
self.assertEqual(response.status_code, 401)
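# Configuration listing is keyed by the required 'system' parameter and restricted to staff users.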
class TestApiConfiguration(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user(is_staff=True)
self.jessica = factory.create_user()
factory.create_configuration(system='snsd', key='kim', value='taeyeon')
factory.create_configuration(system='sms', key='username', value='koyoyo')
factory.create_configuration(system='sms', key='password', value='pass')
def test_get_list_configurations(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'system': 'sms'
}
response = self.client.get(reverse('list_configuration'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['username'], 'koyoyo')
self.assertEqual(response_json['password'], 'pass')
def test_get_list_configurations_invalid(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('list_configuration'))
self.assertEqual(response.status_code, 400)
response_json = json.loads(response.content)
self.assertEqual(response_json['system'], 'System is required.')
def test_not_staff_user_cannot_access_api_configuration(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.jessica.auth_token.key)
response = self.client.get(reverse('list_configuration'))
self.assertEqual(response.status_code, 401)
def test_anonymous_cannot_access_api_configuration(self):
response = self.client.get(reverse('list_configuration'))
self.assertEqual(response.status_code, 401)
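# Registration through an authority invitation code; duplicate serial numbers
# or telephone numbers are rejected with 400.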
class TestApiRegistrationByAuthority(APITestCase):
def setUp(self):
call_command('log_action_create', interactive=False, verbosity=0)
self.taeyeon = factory.create_user(telephone="0841299999")
self.jessica = factory.create_user()
self.authority = factory.create_authority()
self.invitation_code = self.authority.get_invite().code
def test_get_authority_by_no_invitation_code(self):
response = self.client.get(reverse('get_authority_by_invitation_code'))
self.assertEqual(response.status_code, 400)
def test_get_authority_by_wrong_invitation_code(self):
params = { 'invitationCode': 'AA-1123' }
response = self.client.get(reverse('get_authority_by_invitation_code'), params)
self.assertEqual(response.status_code, 400)
def test_get_authority_by_invitation_code(self):
params = { 'invitationCode': self.invitation_code }
response = self.client.get(reverse('get_authority_by_invitation_code'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['code'], self.authority.code)
self.assertEqual(response_json['name'], self.authority.name)
def test_registration_user(self):
params = {
'firstName': 'yoona',
'lastName': 'im',
'serialNumber': '0000000000530',
'telephone': '0800000530',
'authority': self.invitation_code
}
response = self.client.post(reverse('user_register_by_authority'), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertEqual(response_json['firstName'], 'yoona')
self.assertEqual(response_json['lastName'], 'im')
self.assertEqual(response_json['status'], USER_STATUS_ADDITION_VOLUNTEER)
user = User.objects.latest('id')
self.assertEqual(user.first_name, 'yoona')
self.assertEqual(user.last_name, 'im')
self.assertEqual(user.status, USER_STATUS_ADDITION_VOLUNTEER)
authority = Authority.objects.get(id=self.authority.id)
self.assertEqual(authority.users.count(), 1)
user_authority1 = authority.users.all()[0]
self.assertEqual(user_authority1.first_name, 'yoona')
self.assertEqual(user_authority1.last_name, 'im')
def test_registration_user_with_status(self):
params = {
'firstName': 'yoona',
'lastName': 'im',
'serialNumber': '0000000000530',
'telephone': '0800000530',
'authority': self.invitation_code,
'status': USER_STATUS_PODD
}
response = self.client.post(reverse('user_register_by_authority'), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertEqual(response_json['firstName'], 'yoona')
self.assertEqual(response_json['lastName'], 'im')
self.assertEqual(response_json['status'], USER_STATUS_PODD)
user = User.objects.latest('id')
self.assertEqual(user.first_name, 'yoona')
self.assertEqual(user.last_name, 'im')
self.assertEqual(user.status, USER_STATUS_PODD)
authority = Authority.objects.get(id=self.authority.id)
self.assertEqual(authority.users.count(), 1)
user_authority1 = authority.users.all()[0]
self.assertEqual(user_authority1.first_name, 'yoona')
self.assertEqual(user_authority1.last_name, 'im')
def test_registration_user_with_no_authority(self):
params = {
'firstName': 'yoona',
'lastName': 'im',
'serialNumber': '0000000000530',
'telephone': '0800000530',
}
response = self.client.post(reverse('user_register_by_authority'), params)
self.assertEqual(response.status_code, 400)
def test_registration_user_with_same_serial_number(self):
params = {
'firstName': 'krystal',
'lastName': 'jung',
'serialNumber': self.taeyeon.serial_number,
'telephone': '0800000530',
}
response = self.client.post(reverse('user_register_by_authority'), params)
self.assertEqual(response.status_code, 400)
def test_registration_user_with_same_telephone(self):
params = {
'firstName': 'krystal',
'lastName': 'jung',
'serialNumber': '1411900088888',
'telephone': '0841299999',
}
response = self.client.post(reverse('user_register_by_authority'), params)
self.assertEqual(response.status_code, 400)
'''
class TestApiRegistrationByGroup(APITestCase):
def setUp(self):
call_command('log_action_create', interactive=False, verbosity=0)
self.taeyeon = factory.create_user(telephone="0841299999")
self.jessica = factory.create_user()
self.invite_group = factory.create_invite_group()
self.group_r = factory.create_group_type_report_type()
self.group_a = factory.create_group_type_report_type()
self.invite_group.groups.add(self.group_r)
self.invite_group.groups.add(self.group_a)
self.invitation_code = self.invite_group.code
def test_get_group_by_no_invitation_code(self):
response = self.client.get(reverse('get_group_by_invitation_code'))
self.assertEqual(response.status_code, 400)
def test_get_group_by_wrong_invitation_code(self):
params = { 'invitationCode': '21123' }
response = self.client.get(reverse('get_group_by_invitation_code'), params)
self.assertEqual(response.status_code, 400)
def test_get_group_by_invitation_code(self):
params = { 'invitationCode': self.invitation_code }
response = self.client.get(reverse('get_group_by_invitation_code'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['name'], self.invite_group.name)
def test_registration_user(self):
params = {
'firstName': 'yoona',
'lastName': 'im',
'serialNumber': '0000000000530',
'telephone': '0800000530',
'group': self.invitation_code
}
response = self.client.post(reverse('user_register_by_group'), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertEqual(response_json['firstName'], 'yoona')
self.assertEqual(response_json['lastName'], 'im')
self.assertEqual(response_json['status'], USER_STATUS_ADDITION_VOLUNTEER)
user = User.objects.latest('id')
self.assertEqual(user.first_name, 'yoona')
self.assertEqual(user.last_name, 'im')
self.assertEqual(user.status, USER_STATUS_ADDITION_VOLUNTEER)
self.assertEqual(user.groups.count(), 2)
def test_registration_user_with_status(self):
params = {
'firstName': 'yoona',
'lastName': 'im',
'serialNumber': '0000000000530',
'telephone': '0800000530',
'group': self.invitation_code,
'status': USER_STATUS_PODD
}
response = self.client.post(reverse('user_register_by_group'), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertEqual(response_json['firstName'], 'yoona')
self.assertEqual(response_json['lastName'], 'im')
self.assertEqual(response_json['status'], USER_STATUS_PODD)
user = User.objects.latest('id')
self.assertEqual(user.first_name, 'yoona')
self.assertEqual(user.last_name, 'im')
self.assertEqual(user.status, USER_STATUS_PODD)
self.assertEqual(user.groups.count(), 2)
def test_registration_user_with_no_group(self):
params = {
'firstName': 'yoona',
'lastName': 'im',
'serialNumber': '0000000000530',
'telephone': '0800000530',
}
response = self.client.post(reverse('user_register_by_group'), params)
self.assertEqual(response.status_code, 400)
def test_registration_user_with_same_serial_number(self):
params = {
'firstName': 'krystal',
'lastName': 'jung',
'serialNumber': self.taeyeon.serial_number,
'telephone': '0800000530',
}
response = self.client.post(reverse('user_register_by_group'), params)
self.assertEqual(response.status_code, 400)
def test_registration_user_with_same_telephone(self):
params = {
'firstName': 'krystal',
'lastName': 'jung',
'serialNumber': '1411900088888',
'telephone': '0841299999',
}
response = self.client.post(reverse('user_register_by_group'), params)
self.assertEqual(response.status_code, 400)
'''
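# Forgot-password accepts either a serialNumber or an email and rejects unknown values.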
class TestApiForgotPassword(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user(serial_number='0000000000309')
def test_post_forgot_password_no_serial_number_and_no_email(self):
response = self.client.post(reverse('user_forgot_password'))
self.assertEqual(response.status_code, 400)
def test_post_forgot_password_wrong_serial_number(self):
params = {
'serialNumber': '0000000000530'
}
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 400)
def test_post_forgot_password_wrong_email(self):
params = {
'email': '12345678@gmail.com'
}
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 400)
def test_post_forgot_password_serial_number(self):
params = {
'serialNumber': self.taeyeon.serial_number
}
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 200)
def test_post_forgot_password_email(self):
params = {
'email': self.taeyeon.email
}
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 200)
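# Code login flow: forgot-password returns a uid/token pair, then a freshly issued
# UserCode logs the user in; a code issued to another user, or one that has already
# been used, is rejected with 400.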
class TestApiLoginWithCode(APITestCase):
def setUp(self):
call_command('log_action_create', interactive=False, verbosity=0)
self.taeyeon = factory.create_user(serial_number='0000000000309')
self.jessica = factory.create_user(serial_number='0000000000418')
self.code1 = factory.create_user_code(user=self.jessica)
def test_post_forgot_password_no_serial_number(self):
response = self.client.post(reverse('user_forgot_password'))
self.assertEqual(response.status_code, 400)
def test_post_forgot_password_wrong_serial_number(self):
params = { 'serialNumber': '0000000000530' }
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 400)
def test_post_forgot_password_serial_number(self):
params = { 'serialNumber': self.taeyeon.serial_number }
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertIsNotNone(response_json['uid'])
self.assertIsNotNone(response_json['token'])
uid = response_json['uid']
token = response_json['token']
user_code = UserCode.objects.latest('id')
params = { 'code': user_code.code }
response = self.client.post(reverse('user_code_login', args=[uid, token]), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(response_json['firstName'], self.taeyeon.first_name)
self.assertEqual(response_json['lastName'], self.taeyeon.last_name)
def test_post_forgot_password_serial_number_wrong_user_code(self):
params = { 'serialNumber': self.taeyeon.serial_number }
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertIsNotNone(response_json['uid'])
self.assertIsNotNone(response_json['token'])
uid = response_json['uid']
token = response_json['token']
params = { 'code': self.code1.code }
response = self.client.post(reverse('user_code_login', args=[uid, token]), params)
self.assertEqual(response.status_code, 400)
def test_post_forgot_password_serial_number_used_user_code(self):
params = { 'serialNumber': self.taeyeon.serial_number }
response = self.client.post(reverse('user_forgot_password'), params)
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertIsNotNone(response_json['uid'])
self.assertIsNotNone(response_json['token'])
uid = response_json['uid']
token = response_json['token']
user_code = UserCode.objects.latest('id')
params = { 'code': user_code.code }
response = self.client.post(reverse('user_code_login', args=[uid, token]), params)
self.assertEqual(response.status_code, 200)
params = { 'code': user_code.code }
response = self.client.post(reverse('user_code_login', args=[uid, token]), params)
self.assertEqual(response.status_code, 400)
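# Authority listing (parent filtering is currently commented out below) and the rule
# that non-staff users cannot delete an authority.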
class TestApiAuthority(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user(is_staff=True)
self.jessica = factory.create_user()
self.authority1 = factory.create_authority()
self.authority1_1 = factory.create_authority()
self.authority1_1.parent = self.authority1
self.authority1_1.save()
def test_get_list_authority(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
response = self.client.get(reverse('authority-list'))
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
self.assertEqual(len(response_json), 2)
authority1 = response_json[0]
self.assertEqual(authority1['id'], self.authority1.id)
# self.assertEqual(authority1['parent'], None)
authority2 = response_json[1]
self.assertEqual(authority2['id'], self.authority1_1.id)
# self.assertEqual(authority2['parent'], self.authority1.id)
# def test_get_list_authority_filter_parent(self):
# self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
# params = {
# 'parentId': self.authority1.id,
# }
# response = self.client.get(reverse('authority-list'), params)
# self.assertEqual(response.status_code, 200)
# response_json = json.loads(response.content)
# self.assertEqual(len(response_json), 1)
# authority1 = response_json[0]
# self.assertEqual(authority1['id'], self.authority1_1.id)
# self.assertEqual(authority1['parent'], self.authority1.id)
# def test_get_list_authority_filter_none_parent(self):
# self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
# params = {
# 'parentId': None,
# }
# response = self.client.get(reverse('authority-list'), params)
# self.assertEqual(response.status_code, 200)
# response_json = json.loads(response.content)
# self.assertEqual(len(response_json), 1)
# authority1 = response_json[0]
# self.assertEqual(authority1['id'], self.authority1.id)
# self.assertEqual(authority1['parent'], None)
def test_api_delete_authority_forbidden(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.jessica.auth_token.key)
response = self.client.delete(reverse('authority-detail', args=[self.authority1.id]))
self.assertEqual(response.status_code, 403)
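# Anonymous registration by device: creates an 'Anonymous*' user attached to the
# domain's public authority, accepts either a gcmRegId or an apnsRegId but not both,
# and reuses the existing user when the same deviceId registers again.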
class TestApiRegistrationByUserDevice(APITestCase):
def setUp(self):
call_command('log_action_create', interactive=False, verbosity=0)
self.domain_id = settings.CURRENT_DOMAIN_ID
self.authority = Authority.objects.create(code='public_%s' % settings.CURRENT_DOMAIN_ID,
name='public_%s' % settings.CURRENT_DOMAIN_ID)
def test_registration_user_by_gcm_reg_id(self):
params = {
'deviceId': '123456789',
'brand': 'im-mobile',
'model': 'im4',
'gcmRegId': 'FA91bEWLspNo7KZaBjJjMZAHaRPpl3HMOrNAq995twkB2v3t_'
}
response = self.client.post(reverse('user_register_by_user_device', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertTrue('Anonymous' in response_json['username'])
self.assertEqual(response_json['status'], USER_STATUS_ADDITION_VOLUNTEER)
user = User.objects.latest('id')
self.assertTrue('Anonymous' in user.username)
self.assertEqual(user.status, USER_STATUS_ADDITION_VOLUNTEER)
self.assertTrue(user.is_anonymous)
self.assertTrue(user.is_public)
device = UserDevice.objects.latest('id')
self.assertEqual(device.device_id, '123456789')
self.assertEqual(device.brand, 'im-mobile')
self.assertEqual(device.model, 'im4')
self.assertEqual(device.gcm_reg_id, 'FA91bEWLspNo7KZaBjJjMZAHaRPpl3HMOrNAq995twkB2v3t_')
authority = Authority.objects.get(code='public_%s' % settings.CURRENT_DOMAIN_ID)
self.assertEqual(authority.name, 'public_%s' % settings.CURRENT_DOMAIN_ID)
self.assertEqual(authority.users.filter(id=user.id).count(), 1)
def test_registration_user_by_apns_reg_id(self):
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
'apnsRegId': 'e38ee577fd3e0dd2'
}
response = self.client.post(reverse('user_register_by_user_device', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertTrue('Anonymous' in response_json['username'])
self.assertEqual(response_json['status'], USER_STATUS_ADDITION_VOLUNTEER)
user = User.objects.latest('id')
self.assertTrue('Anonymous' in user.username)
self.assertEqual(user.status, USER_STATUS_ADDITION_VOLUNTEER)
self.assertTrue(user.is_anonymous)
self.assertTrue(user.is_public)
device = UserDevice.objects.latest('id')
self.assertEqual(device.device_id, '987654321')
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
self.assertEqual(device.apns_reg_id, 'e38ee577fd3e0dd2')
authority = Authority.objects.get(code='public_%s' % settings.CURRENT_DOMAIN_ID)
self.assertEqual(authority.name, 'public_%s' % settings.CURRENT_DOMAIN_ID)
self.assertEqual(authority.users.filter(id=user.id).count(), 1)
def test_registration_user_with_no_gcm_reg_id_and_no_apns_id(self):
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
}
response = self.client.post(reverse('user_register_by_user_device', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertTrue('Anonymous' in response_json['username'])
self.assertEqual(response_json['status'], USER_STATUS_ADDITION_VOLUNTEER)
user = User.objects.latest('id')
self.assertTrue('Anonymous' in user.username)
self.assertEqual(user.status, USER_STATUS_ADDITION_VOLUNTEER)
self.assertTrue(user.is_anonymous)
self.assertTrue(user.is_public)
device = UserDevice.objects.latest('id')
self.assertEqual(device.device_id, '987654321')
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
def test_registration_user_same_device(self):
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
}
response = self.client.post(reverse('user_register_by_user_device', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertTrue('Anonymous' in response_json['username'])
self.assertEqual(response_json['status'], USER_STATUS_ADDITION_VOLUNTEER)
user_id = response_json['id']
username = response_json['username']
response = self.client.post(reverse('user_register_by_user_device', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 201)
user = User.objects.latest('id')
self.assertEqual(user.id, user_id)
self.assertEqual(user.username, username)
device = UserDevice.objects.latest('id')
self.assertEqual(device.device_id, '987654321')
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
def test_registration_user_with_gcm_reg_id_and_apns_id(self):
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
'gcmRegId': 'FA91bEWLspNo7KZaBjJjMZAHaRPpl3HMOrNAq995twkB2v3t_',
'apnsRegId': 'e38ee577fd3e0dd2'
}
response = self.client.post(reverse('user_register_by_user_device', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 400)
'''
class TestApiRegistrationByFacebook(APITestCase):
def setUp(self):
call_command('log_action_create', interactive=False, verbosity=0)
self.domain_id = 1
@patch('facebook.GraphAPI.get_object', mock_facebook_graph_get_object)
@patch('accounts.api.upload_to_s3', mock_upload_to_s3)
def test_api_facebook_connect(self):
params = {
'facebook_access_token': 'CAACEdEose0cBAEPHQz2q46MV8a4m6Lg2',
}
response = self.client.post(reverse('facebook_connect', args=[self.domain_id]), params)
self.assertEqual(response.status_code, 201)
response_json = json.loads(response.content)
self.assertEqual(response_json['email'], 'taeyeon_kim@hotmail.com')
self.assertEqual(response_json['firstName'], 'Taeyeon Kim')
self.assertEqual(response_json['avatarUrl'], 'https://fbcdn-profile-a.akamaihd.net/hprofile-ak-xap1/v/t1.0-1/p200x200/1469737_693160797374375_1503926674_n.jpg?oh=3f0222140bc6e623991454b0c1010175&oe=56C75B45&__gda__=1452604903_646f4de4341bdcf0124ab82dee6f3d52')
self.assertFalse(response_json['isAnonymous'])
user = User.objects.latest('id')
self.assertEqual(user.username, 'taeyeon_kim@hotmail.com')
self.assertEqual(user.email, 'taeyeon_kim@hotmail.com')
self.assertEqual(user.first_name, 'Taeyeon Kim')
self.assertEqual(user.avatar_url, 'https://fbcdn-profile-a.akamaihd.net/hprofile-ak-xap1/v/t1.0-1/p200x200/1469737_693160797374375_1503926674_n.jpg?oh=3f0222140bc6e623991454b0c1010175&oe=56C75B45&__gda__=1452604903_646f4de4341bdcf0124ab82dee6f3d52')
self.assertEqual(user.fbuid, '603719628')
self.assertFalse(user.is_anonymous)
self.assertTrue(user.is_public)
authority = Authority.objects.get(code='public_1')
self.assertEqual(authority.name, 'public_1')
self.assertEqual(authority.users.filter(id=user.id).count(), 1)
'''
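# user_update_device upserts the caller's device by deviceId; taking over another
# user's device reassigns it, and taking over an anonymous user's device also
# deactivates that user.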
class TestApiUserDevice(APITestCase):
def setUp(self):
self.taeyeon = factory.create_user(status='VOLUNTEER')
self.jessica = factory.create_user(status='VOLUNTEER')
self.anonymous530 = factory.create_user(is_public=True, is_anonymous=True)
self.device1 = factory.create_user_device(user=self.jessica)
self.device2 = factory.create_user_device(user=self.anonymous530)
def test_post_update_user_device(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(device_id='987654321')
self.assertEqual(device.device_id, '987654321')
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
self.assertEqual(device.user, self.taeyeon)
def test_post_update_user_device_with_gcm_reg_id(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'deviceId': '123456789',
'brand': 'im-mobile',
'model': 'im4',
'gcmRegId': 'FA91bEWLspNo7KZaBjJjMZAHaRPpl3HMOrNAq995twkB2v3t_'
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(device_id='123456789')
self.assertEqual(device.device_id, '123456789')
self.assertEqual(device.brand, 'im-mobile')
self.assertEqual(device.model, 'im4')
self.assertEqual(device.gcm_reg_id, 'FA91bEWLspNo7KZaBjJjMZAHaRPpl3HMOrNAq995twkB2v3t_')
self.assertEqual(device.user, self.taeyeon)
def test_post_update_user_device_with_apns_reg_id(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
'apnsRegId': 'e38ee577fd3e0dd2'
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(device_id='987654321')
self.assertEqual(device.device_id, '987654321')
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
self.assertEqual(device.apns_reg_id, 'e38ee577fd3e0dd2')
self.assertEqual(device.user, self.taeyeon)
def test_post_update_user_device_with_gcm_reg_id_and_apns_reg_id(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
'gcmRegId': 'FA91bEWLspNo7KZaBjJjMZAHaRPpl3HMOrNAq995twkB2v3t_',
'apnsRegId': 'e38ee577fd3e0dd2'
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 400)
def test_post_update_same_user_device(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'deviceId': self.device1.device_id,
'brand': 'iphone',
'model': 'iphone4',
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(device_id=self.device1.device_id)
self.assertEqual(device.device_id, self.device1.device_id)
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
self.assertEqual(device.user, self.taeyeon)
def test_post_update_same_anonymous_user_device(self):
self.client.credentials(HTTP_AUTHORIZATION = 'Token ' + self.taeyeon.auth_token.key)
params = {
'deviceId': self.device2.device_id,
'brand': 'iphone',
'model': 'iphone4',
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 200)
device = UserDevice.objects.get(device_id=self.device2.device_id)
self.assertEqual(device.device_id, self.device2.device_id)
self.assertEqual(device.brand, 'iphone')
self.assertEqual(device.model, 'iphone4')
self.assertEqual(device.user, self.taeyeon)
user = User.objects.get(id=self.anonymous530.id)
self.assertFalse(user.is_active)
def test_cannot_access_user_update_device(self):
params = {
'deviceId': '987654321',
'brand': 'iphone',
'model': 'iphone4',
}
response = self.client.post(reverse('user_update_device'), params)
self.assertEqual(response.status_code, 401)
[file: backend/test/test_integration/test_file_manager.py | repo: Software-Engineering-Bachelor-Project/mycroft | lang: Python | license: MIT]
from django.test import TestCase
from unittest.mock import patch
# Import module
from backend.file_manager import *
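# The wildcard import above supplies everything exercised below: the helpers
# (create_project, create_root_folder, create_subfolder, create_clip,
# create_object_detection, add_folder_to_project, and the get_*/add_folder/
# remove_folder entry points), the request-key constants (PROJECT_ID, FOLDER_ID,
# FOLDERS, FOLDER_IDS, CLIPS, FILE_PATH), and presumably also re-exports Decimal,
# timezone, pytz, and settings used in the setUp methods.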
class GetSourceFolders(TestCase):
def setUp(self) -> None:
"""
Set up a complex file structure.
"""
self.rid = create_root_folder(path='home/user/', name='test_folder')
self.sid1 = create_subfolder(parent_fid=self.rid, name='test_subfolder')
self.sid2 = create_subfolder(parent_fid=self.rid, name='another_test_subfolder')
self.sid3 = create_subfolder(parent_fid=self.sid1, name='third_test_subfolder')
self.pid = create_project(name="test_project")
add_folder_to_project(self.sid1, self.pid)
def test_basic_call(self):
"""
Test simple call.
"""
code, res = get_source_folders(data={PROJECT_ID: self.pid})
self.assertEqual(code, 200)
self.assertEqual(len(res[FOLDERS]), 2)
self.assertEqual(len(res[FOLDER_IDS]), 1)
class GetFoldersTest(TestCase):
def setUp(self) -> None:
"""
Set up a complex file structure.
"""
self.pid = create_project(name="test_project")
self.rid = create_root_folder(path='home/user/', name='test_folder')
self.sid1 = create_subfolder(parent_fid=self.rid, name='test_subfolder')
self.sid2 = create_subfolder(parent_fid=self.rid, name='another_test_subfolder')
self.sid3 = create_subfolder(parent_fid=self.sid1, name='third_test_subfolder')
add_folder_to_project(fid=self.rid, pid=self.pid)
def test_complex_file_structure(self):
"""
Test with a complex file structure.
"""
code, res = get_folders(data={PROJECT_ID: self.pid})
self.assertEqual(code, 200)
self.assertEqual(len(res[FOLDERS]), 4)
def test_redundant_parameter(self):
"""
Test with a redundant parameter.
"""
code, res = get_folders(data={PROJECT_ID: self.pid, FOLDER_ID: 42})
self.assertEqual(code, 200)
self.assertEqual(len(res[FOLDERS]), 4)
class AddFoldersTest(TestCase):
def setUp(self) -> None:
"""
Set up a test project.
"""
self.pid = create_project('test_project')
self.fid = create_root_folder(path='home/user/', name='test_folder')
def test_simple_call(self):
"""
Test adding a folder to a project.
"""
code, res = add_folder({PROJECT_ID: self.pid, FOLDER_ID: self.fid})
self.assertEqual(code, 200)
self.assertEqual(res, {FOLDERS: [], FOLDER_IDS: []})
def test_bad_file_path(self):
"""
Test with a bad file path.
"""
code, res = add_folder({FILE_PATH: 'test_folder'})
self.assertEqual(code, 400)
self.assertEqual(res, {})
def test_non_existing_project(self):
"""
Test with a project id that doesn't exist.
"""
code, res = add_folder(data={PROJECT_ID: 42, FOLDER_ID: self.fid})
self.assertEqual(code, 204)
self.assertEqual(res, {})
def test_non_existing_folder(self):
"""
Test with a folder id that doesn't exist.
"""
code, res = add_folder(data={PROJECT_ID: self.pid, FOLDER_ID: 42})
self.assertEqual(code, 204)
self.assertEqual(res, {})
class RemoveFoldersTest(TestCase):
def setUp(self) -> None:
"""
Set up a test project.
"""
self.pid = create_project('test_project')
self.fid = create_root_folder(path='home/user/', name='test_folder')
def test_simple_call(self):
"""
Test removing a folder from a project.
"""
code, res = remove_folder({PROJECT_ID: self.pid, FOLDER_ID: self.fid})
self.assertEqual(code, 200)
self.assertEqual(res, {FOLDERS: [], FOLDER_IDS: []})
def test_bad_file_path(self):
"""
Test with a bad file path.
"""
code, res = remove_folder({FILE_PATH: 'test_folder'})
self.assertEqual(code, 400)
self.assertEqual(res, {})
def test_non_existing_project(self):
"""
Test with a project id that doesn't exist.
"""
code, res = remove_folder(data={PROJECT_ID: 42, FOLDER_ID: self.fid})
self.assertEqual(code, 204)
self.assertEqual(res, {})
def test_non_existing_folder(self):
"""
Test with a folder id that doesn't exist.
"""
code, res = remove_folder(data={PROJECT_ID: self.pid, FOLDER_ID: 42})
self.assertEqual(code, 204)
self.assertEqual(res, {})
class GetClipsTest(TestCase):
@patch('backend.database_wrapper.create_hash_sum')
def setUp(self, mock_create_hash_sum) -> None:
"""
Set up a test project.
"""
mock_create_hash_sum.return_value = '1234'
self.cam_name = 'test_camera'
self.lat = Decimal(value="13.37")
self.lon = Decimal(value="0.42")
self.st = timezone.datetime(2020, 1, 17, tzinfo=pytz.timezone(settings.TIME_ZONE))
self.et = timezone.datetime(2020, 1, 18, tzinfo=pytz.timezone(settings.TIME_ZONE))
self.pid = create_project(name="test_project")
self.rid = create_root_folder(path='home/user/', name='test_folder')
self.sid1 = create_subfolder(parent_fid=self.rid, name='test_subfolder')
self.sid2 = create_subfolder(parent_fid=self.rid, name='another_test_subfolder')
self.sid3 = create_subfolder(parent_fid=self.sid1, name='third_test_subfolder')
self.cid1 = create_clip(fid=self.rid, clip_name="test_clip1", video_format="tvf", start_time=self.st,
end_time=self.et, latitude=self.lat, longitude=self.lon, width=256, height=240,
frame_rate=42.0, camera_name=self.cam_name)
self.cid2 = create_clip(fid=self.sid1, clip_name="test_clip2", video_format="tvf", start_time=self.st,
end_time=self.et, latitude=self.lat, longitude=self.lon, width=256, height=240,
frame_rate=42.0, camera_name=self.cam_name)
self.cid3 = create_clip(fid=self.sid3, clip_name="test_clip3", video_format="tvf", start_time=self.st,
end_time=self.et, latitude=self.lat, longitude=self.lon, width=256, height=240,
frame_rate=42.0, camera_name=self.cam_name)
add_folder_to_project(fid=self.rid, pid=self.pid)
def test_complex_file_structure(self):
"""
Test retrieving all clips in a project.
"""
code, res = get_clips(data={PROJECT_ID: self.pid})
self.assertEqual(code, 200)
self.assertEqual(len(res[CLIPS]), 3)
def test_no_clips_in_project(self):
"""
Test when there are no clips in the project.
"""
pid = create_project(name="test_project2")
code, res = get_clips(data={PROJECT_ID: pid})
self.assertEqual(code, 200)
self.assertEqual(len(res[CLIPS]), 0)
def test_non_existing_project(self):
"""
Test with a project id that doesn't exist.
"""
code, res = get_clips(data={PROJECT_ID: 42})
self.assertEqual(code, 204)
self.assertEqual(res, {})
def test_bad_request(self):
"""
Test with bad request.
"""
code, res = get_clips(data={FOLDER_ID: self.pid})
self.assertEqual(code, 400)
self.assertEqual(res, {})
def test_object_detection_set(self):
"""
Test that the right objectdetection_set is returned.
"""
create_object_detection(cid=self.cid1, sample_rate=60, start_time=self.st, end_time=self.et,
objects=[("car", self.st)])
create_object_detection(cid=self.cid1, sample_rate=50, start_time=self.st, end_time=self.et,
objects=[("car", self.st), ("bicycle", self.st)])
create_object_detection(cid=self.cid1, sample_rate=60, start_time=self.st, end_time=self.et,
objects=[("bicycle", self.st)])
create_object_detection(cid=self.cid2, sample_rate=10, start_time=self.st, end_time=self.et,
objects=[("person", self.st), ("person", self.st)])
code, res = get_clips(data={PROJECT_ID: self.pid})
self.assertEqual(code, 200)
self.assertEqual(res[CLIPS][0]['objectdetection_set'], {'rate': 50, 'objects': {'car': 1, 'bicycle': 1}})
self.assertEqual(res[CLIPS][1]['objectdetection_set'], {'rate': 10, 'objects': {'person': 2}})
self.assertEqual(res[CLIPS][2]['objectdetection_set'], None)
class GetFilesTest(TestCase):
@patch('backend.database_wrapper.create_hash_sum')
def setUp(self, mock_create_hash_sum) -> None:
"""
Set up a test project.
"""
mock_create_hash_sum.return_value = '1234'
self.cam_name = 'test_camera'
self.lat = Decimal(value="13.37")
self.lon = Decimal(value="0.42")
self.st = timezone.datetime(2020, 1, 17, tzinfo=pytz.timezone(settings.TIME_ZONE))
self.et = timezone.datetime(2020, 1, 18, tzinfo=pytz.timezone(settings.TIME_ZONE))
self.pid = create_project(name="test_project")
self.rid = create_root_folder(path='home/user/', name='test_folder')
self.sid1 = create_subfolder(parent_fid=self.rid, name='test_subfolder')
self.sid2 = create_subfolder(parent_fid=self.rid, name='another_test_subfolder')
self.sid3 = create_subfolder(parent_fid=self.sid1, name='third_test_subfolder')
self.cid1 = create_clip(fid=self.rid, clip_name="test_clip1", video_format="tvf", start_time=self.st,
end_time=self.et, latitude=self.lat, longitude=self.lon, width=256, height=240,
frame_rate=42.0, camera_name=self.cam_name)
self.cid2 = create_clip(fid=self.sid1, clip_name="test_clip2", video_format="tvf", start_time=self.st,
end_time=self.et, latitude=self.lat, longitude=self.lon, width=256, height=240,
frame_rate=42.0, camera_name=self.cam_name)
self.cid3 = create_clip(fid=self.sid3, clip_name="test_clip3", video_format="tvf", start_time=self.st,
end_time=self.et, latitude=self.lat, longitude=self.lon, width=256, height=240,
frame_rate=42.0, camera_name=self.cam_name)
add_folder_to_project(fid=self.rid, pid=self.pid)
def test_complex_file_structure(self):
"""
Test retrieving all clips and folders in a project.
"""
code, res = get_files(data={PROJECT_ID: self.pid})
self.assertEqual(code, 200)
self.assertEqual(len(res[CLIPS]), 3)
self.assertEqual(len(res[FOLDERS]), 4)
def test_non_existing_project(self):
"""
Test with a project id that doesn't exist.
"""
code, res = get_files(data={PROJECT_ID: 42})
self.assertEqual(code, 204)
self.assertEqual(res, {})
def test_bad_request(self):
"""
Test with bad request.
"""
code, res = get_files(data={FOLDER_ID: self.pid})
self.assertEqual(code, 400)
self.assertEqual(res, {})
[file: tests/verifiers_tests/utils.py | repo: nathzi1505/DNNV | lang: Python | license: MIT]
import os
import unittest
from dnnv import nn
from dnnv import properties
from dnnv.properties import Symbol
from dnnv.properties.context import get_context
from dnnv.verifiers import SAT, UNSAT, UNKNOWN
from tests.utils import network_artifact_dir, property_artifact_dir
RUNS_PER_PROP = int(os.environ.get("_DNNV_TEST_RUNS_PER_PROP", "1"))
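# Every property check below is repeated RUNS_PER_PROP times (override via the
# _DNNV_TEST_RUNS_PER_PROP env var), presumably to surface nondeterministic verifier
# behaviour; incomplete verifiers are allowed to answer UNKNOWN where a complete one
# must answer SAT.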
class VerifierTests:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verifier = None
self.is_complete = False
self.initialize()
def initialize(self):
raise NotImplementedError()
def setUp(self):
self.reset_property_context()
for varname in ["SHIFT", "INPUT_LAYER", "OUTPUT_LAYER"]:
if varname in os.environ:
del os.environ[varname]
def reset_property_context(self):
get_context().reset()
def test_sum_gt_one_localrobustness_shift_left_unsat(self):
os.environ["SHIFT"] = "-100"
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "sum_gt_one.onnx")
phi = properties.parse(
property_artifact_dir / "regression_localrobustness_0.py"
)
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
def test_sum_gt_one_localrobustness_shift_right_unsat(self):
os.environ["SHIFT"] = "100"
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "sum_gt_one.onnx")
phi = properties.parse(
property_artifact_dir / "regression_localrobustness_0.py"
)
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
def test_sum_gt_one_localrobustness_no_shift_sat(self):
os.environ["SHIFT"] = "0"
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "sum_gt_one.onnx")
phi = properties.parse(
property_artifact_dir / "regression_localrobustness_0.py"
)
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
if self.is_complete:
self.assertEqual(result, SAT)
else:
self.assertIn(result, [UNKNOWN, SAT])
def test_const_zero_localrobustness(self):
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "const_zero.onnx")
phi = properties.parse(
property_artifact_dir / "regression_localrobustness_0.py"
)
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
def test_const_one_localrobustness(self):
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "const_one.onnx")
phi = properties.parse(
property_artifact_dir / "regression_localrobustness_0.py"
)
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
def test_a_gt_b_localrobustness_unsat(self):
os.environ["OUTPUT_LAYER"] = "-1"
os.environ["SHIFT"] = "np.asarray([[100,0]], dtype=np.float32)"
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "a_gt_b.onnx")
phi = properties.parse(property_artifact_dir / "class_localrobustness_0.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "a_gt_b.onnx")
phi = properties.parse(property_artifact_dir / "class_localrobustness_1.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
os.environ["SHIFT"] = "np.asarray([[0,100]], dtype=np.float32)"
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "a_gt_b.onnx")
phi = properties.parse(property_artifact_dir / "class_localrobustness_0.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "a_gt_b.onnx")
phi = properties.parse(property_artifact_dir / "class_localrobustness_1.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
def test_a_gt_b_localrobustness_sat(self):
os.environ["OUTPUT_LAYER"] = "-1"
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "a_gt_b.onnx")
phi = properties.parse(property_artifact_dir / "class_localrobustness_0.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
if self.is_complete:
self.assertEqual(result, SAT)
else:
self.assertIn(result, [UNKNOWN, SAT])
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "a_gt_b.onnx")
phi = properties.parse(property_artifact_dir / "class_localrobustness_1.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
if self.is_complete:
self.assertEqual(result, SAT)
else:
self.assertIn(result, [UNKNOWN, SAT])
def test_const_zero_ge1_sat(self):
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "const_zero.onnx")
phi = properties.parse(property_artifact_dir / "output_ge1_0.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
if self.is_complete:
self.assertEqual(result, SAT)
else:
self.assertIn(result, [UNKNOWN, SAT])
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "const_zero.onnx")
phi = properties.parse(property_artifact_dir / "output_ge1_1.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
if self.is_complete:
self.assertEqual(result, SAT)
else:
self.assertIn(result, [UNKNOWN, SAT])
def test_const_one_ge1_unsat(self):
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "const_one.onnx")
phi = properties.parse(property_artifact_dir / "output_ge1_0.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
for i in range(RUNS_PER_PROP):
self.reset_property_context()
dnn = nn.parse(network_artifact_dir / "const_one.onnx")
phi = properties.parse(property_artifact_dir / "output_ge1_1.py")
phi.concretize(N=dnn.simplify())
result, _ = self.verifier.verify(phi)
self.assertEqual(result, UNSAT)
[file: 2017/day_1/part_2.py | repo: Atropos148/Advent-of-Code | lang: Python | license: MIT]
puzzle_input = '823936645345581272695677318513459491834641129844393742672553544439126314399846773234845535593355348931499496184839582118817689171948635864427852215325421433717458975771369522138766248225963242168658975326354785415252974294317138511141826226866364555761117178764543435899886711426319675443679829181257496966219435831621565519667989898725836639626681645821714861443141893427672384716732765884844772433374798185955741311116365899659833634237938878181367317218635539667357364295754744829595842962773524584225427969467467611641591834876769829719248136613147351298534885563144114336211961674392912181735773851634298227454157885241769156811787611897349965331474217223461176896643242975397227859696554492996937235423272549348349528559432214521551656971136859972232854126262349381254424597348874447736545722261957871275935756764184378994167427983811716675476257858556464755677478725146588747147857375293675711575747132471727933773512571368467386151966568598964631331428869762151853634362356935751298121849281442128796517663482391226174256395515166361514442624944181255952124524815268864131969151433888721213595267927325759562132732586252438456569556992685896517565257787464673718221817783929691626876446423134331749327322367571432532857235214364221471769481667118117729326429556357572421333798517168997863151927281418238491791975399357393494751913155219862399959646993428921878798119215675548847845477994836744929918954159722827194721564121532315459611433157384994543332773796862165243183378464731546787498174844781781139571984272235872866886275879944921329959736315296733981313643956576956851762149275521949177991988236529475373595217665112434727744235789852852765675189342753695377219374791548554786671473733124951946779531847479755363363288448281622183736545494372344785112312749694167483996738384351293899149136857728545977442763489799693492319549773328626918874718387697878235744154491677922317518952687439655962477734559232755624943644966227973617788182213621899579391324399386146423427262874437992579573858589183571854577861459758534348533553925167947139351819511798829977371215856637215221838924612644785498936263849489519896548811254628976642391428413984281758771868781714266261781359762798'
test = '12131415'
total = 0
# Switch between the test string and the real puzzle input here.
input_data = puzzle_input
half_length = int(len(input_data)*0.5)
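# Each digit is compared with the digit halfway around the circular sequence;
# Python's negative indexing makes the wrap-around automatic.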
for index in range(len(input_data)):
if input_data[index-half_length] == input_data[index]:
total += int(input_data[index])
print(total)
| 156
| 2,207
| 0.964343
| 49
| 2,496
| 48.918367
| 0.489796
| 0.022528
| 0.017522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.900573
| 0.020833
| 2,496
| 15
| 2,208
| 166.4
| 0.080196
| 0.015224
| 0
| 0
| 0
| 0
| 0.894951
| 0.891694
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c69bb2bd13862bf6aaa93ba1172d2c8a71d3d8aa
| 4,145
|
py
|
Python
|
src/text_selection_tests/kld/kld_iterator_py/test_remove_nan_rows.py
|
stefantaubert/text-selection
|
4b3b49005cbeb2e9212ed94686d8e871c6c2c368
|
[
"MIT"
] | null | null | null |
src/text_selection_tests/kld/kld_iterator_py/test_remove_nan_rows.py
|
stefantaubert/text-selection
|
4b3b49005cbeb2e9212ed94686d8e871c6c2c368
|
[
"MIT"
] | null | null | null |
src/text_selection_tests/kld/kld_iterator_py/test_remove_nan_rows.py
|
stefantaubert/text-selection
|
4b3b49005cbeb2e9212ed94686d8e871c6c2c368
|
[
"MIT"
] | null | null | null |
import numpy as np
from text_selection.kld.kld_iterator import remove_nan_rows
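# Behaviour exercised below: remove_nan_rows drops a row (or a whole 1-D vector)
# only when every value in it is NaN; rows mixing numbers and NaNs are kept.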
# region empty
def test_s0_empty__returns_empty():
qk = np.array([], dtype=np.float64)
result = remove_nan_rows(qk, axis=0)
np.testing.assert_array_equal(result, np.empty(shape=(0,)))
def test_s0x0_empty_returns_empty():
qk = np.empty(shape=(0, 0), dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(0, 0)))
def test_s1x0_empty_returns_empty():
qk = np.array([[]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(1, 0)))
def test_s2x0_empty_returns_empty():
qk = np.array([[], []], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(2, 0)))
# endregion
# region normal
def test_s1_no_nan__returns_full_array():
qk = np.array([1], dtype=np.float64)
result = remove_nan_rows(qk, axis=0)
np.testing.assert_array_equal(result, np.array([1]))
def test_s2_no_nan__returns_full_array():
qk = np.array([1, 2], dtype=np.float64)
result = remove_nan_rows(qk, axis=0)
np.testing.assert_array_equal(result, np.array([1, 2]))
def test_s1x1_no_nan__returns_full_array():
qk = np.array([[1]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[1]]))
def test_s1x2_no_nan__returns_full_array():
qk = np.array([[1, 2]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[1, 2]]))
def test_s2x1_no_nan__returns_full_array():
qk = np.array([[1], [2]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[1], [2]]))
def test_s2x2_no_nan__returns_full_array():
qk = np.array([[1, 2], [2, 1]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[1, 2], [2, 1]]))
# endregion
# region all nan
def test_s1_all_nan__returns_empty():
qk = np.array([np.nan], dtype=np.float64)
result = remove_nan_rows(qk, axis=0)
np.testing.assert_array_equal(result, np.empty(shape=(0,)))
def test_s2_all_nan__returns_empty():
qk = np.array([np.nan, np.nan], dtype=np.float64)
result = remove_nan_rows(qk, axis=0)
np.testing.assert_array_equal(result, np.empty(shape=(0,)))
def test_s1x1_all_nan__returns_empty():
qk = np.array([[np.nan]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(0, 1)))
def test_s1x2_all_nan__returns_empty():
qk = np.array([[np.nan, np.nan]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(0, 2)))
def test_s2x1_all_nan__returns_empty():
qk = np.array([[np.nan], [np.nan]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(0, 1)))
def test_s2x2_all_nan__returns_empty():
qk = np.array([[np.nan, np.nan], [np.nan, np.nan]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.empty(shape=(0, 2)))
# endregion
# region one nan
def test_s2_one_nan__returns_all_entries():
qk = np.array([1.0, np.nan], dtype=np.float64)
result = remove_nan_rows(qk, axis=0)
np.testing.assert_array_equal(result, np.array([1.0, np.nan]))
def test_s1x2_one_nan__returns_all_entries():
qk = np.array([[1.0, np.nan]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[1.0, np.nan]]))
def test_s2x2_one_nan__returns_all_entries():
qk = np.array([[np.nan, 1], [1, np.nan]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[np.nan, 1], [1, np.nan]]))
# endregion
def test_s3x3_component_test():
qk = np.array([[1, 2, np.nan], [np.nan, np.nan, np.nan], [1, 0, 0]], dtype=np.float64)
result = remove_nan_rows(qk, axis=1)
np.testing.assert_array_equal(result, np.array([[1, 2, np.nan], [1, 0, 0]]))
| 31.641221
| 88
| 0.713631
| 722
| 4,145
| 3.808864
| 0.067867
| 0.073818
| 0.099273
| 0.145455
| 0.892727
| 0.874545
| 0.868364
| 0.868364
| 0.854909
| 0.842545
| 0
| 0.043383
| 0.115802
| 4,145
| 130
| 89
| 31.884615
| 0.706958
| 0.02316
| 0
| 0.329268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243902
| 1
| 0.243902
| false
| 0
| 0.02439
| 0
| 0.268293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c69c5e032318d0df5a8857c4f620b133847d4c0a
| 77
|
py
|
Python
|
blorp/__init__.py
|
jrdh/blorp-python
|
0f73e5cb5e319c7ce797c33e8349e566a29121d2
|
[
"MIT"
] | null | null | null |
blorp/__init__.py
|
jrdh/blorp-python
|
0f73e5cb5e319c7ce797c33e8349e566a29121d2
|
[
"MIT"
] | null | null | null |
blorp/__init__.py
|
jrdh/blorp-python
|
0f73e5cb5e319c7ce797c33e8349e566a29121d2
|
[
"MIT"
] | null | null | null |
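# Package-level convenience re-exports from the util, handler, and app submodules.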
from blorp.util import *
from blorp.handler import *
from blorp.app import *
| 19.25
| 27
| 0.766234
| 12
| 77
| 4.916667
| 0.5
| 0.457627
| 0.508475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155844
| 77
| 3
| 28
| 25.666667
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c6f43e9f34b714a8b8eab5262da07402ff41ab03
| 117
|
py
|
Python
|
setup.py
|
KOLANICH/read_version
|
601a036d115dd831319ed3c4700168e5c7e03228
|
[
"MIT"
] | null | null | null |
setup.py
|
KOLANICH/read_version
|
601a036d115dd831319ed3c4700168e5c7e03228
|
[
"MIT"
] | null | null | null |
setup.py
|
KOLANICH/read_version
|
601a036d115dd831319ed3c4700168e5c7e03228
|
[
"MIT"
] | null | null | null |
from setuptools import setup
from read_version import read_version
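# read_version() extracts __version__ from the named source file without importing it.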
setup(version=read_version('read_version.py'))
| 23.4
| 46
| 0.82906
| 17
| 117
| 5.470588
| 0.411765
| 0.473118
| 0.387097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 117
| 4
| 47
| 29.25
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
05a75b39dfc4d7a5b3f311fce961f0e8f72e80c5
| 9,193
|
py
|
Python
|
lib_bgp_data/simulations/simulator/tests/system_tests/test_figure_4.py
|
jfuruness/lib_bgp_data
|
25f7d57b9e2101c7aefb325e8d728bd91f47d557
|
[
"BSD-3-Clause"
] | 16
|
2018-09-24T05:10:03.000Z
|
2021-11-29T19:18:59.000Z
|
lib_bgp_data/simulations/simulator/tests/system_tests/test_figure_4.py
|
jfuruness/lib_bgp_data
|
25f7d57b9e2101c7aefb325e8d728bd91f47d557
|
[
"BSD-3-Clause"
] | 4
|
2019-10-09T18:54:17.000Z
|
2021-03-05T14:02:50.000Z
|
lib_bgp_data/simulations/simulator/tests/system_tests/test_figure_4.py
|
jfuruness/lib_bgp_data
|
25f7d57b9e2101c7aefb325e8d728bd91f47d557
|
[
"BSD-3-Clause"
] | 3
|
2018-09-17T17:35:18.000Z
|
2020-03-24T16:03:31.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""This file contains system tests for the extrapolator.
For specifics on each test, see the docstrings under each function.
"""
import pytest
from .graph_tester import Graph_Tester
#from ..tables import Hijack
from ....enums import Non_Default_Policies, Policies, Data_Plane_Conditions as Conds
from ...attacks.attack_classes import Subprefix_Hijack
from ...attacks.attack import Attack
__author__ = "Justin Furuness"
__credits__ = ["Justin Furuness"]
__license__ = "BSD"
__maintainer__ = "Justin Furuness"
__email__ = "jfuruness@gmail.com"
__status__ = "Development"
class Test_Figure_4(Graph_Tester):
"""Tests all example graphs within our paper."""
def test_figure_4a(self):
r"""v3 example with ROV++v2
/44\
53 | 666
/ | \
/ | 87 \
54 |/ \ \
\ 33 99
\ /
22
"""
attack_types = [Subprefix_Hijack]
adopt_policies = [Non_Default_Policies.ROVPP_V2]
peer_rows = []
provider_customer_rows = [[44, 53],
[44, 33],
[44, 99],
[44, 666],
[53, 54],
[54, 22],
[33, 22],
[87, 33],
[87, 99]]
# Set adopting rows
bgp_ases = [54, 53, 22, 44, 87, 99, 666]
adopting_ases = [33]
adopting_rows = []
for bgp_as in bgp_ases:
adopting_rows.append([bgp_as, Policies.DEFAULT.value, False])
for adopting_as in adopting_ases:
adopting_rows.append([adopting_as, Policies.ROVPP_V2.value, True])
attacker = 666
victim = 99
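# Expected extrapolator output: one entry per (ASN, prefix) pair, recording the
# route's origin and the neighbor it was received from.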
exr_output = [{"asn": 44,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 99},
{"asn": 44,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 666},
{"asn": 666,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 44},
{"asn": 666,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": Conds.HIJACKED.value},
{"asn": 53,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 44},
{"asn": 53,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 44},
{"asn": 54,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 53},
{"asn": 54,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 53},
{"asn": 22,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 33},
{"asn": 22,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 54},
{"asn": 87,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 99},
{"asn": 33,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 87},
{"asn": 99,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": Conds.NOTHIJACKED.value },
{"asn": 99,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": Conds.NOTHIJACKED.value},
]
self._test_graph(attack_types=attack_types,
adopt_policies=adopt_policies,
peer_rows=peer_rows,
provider_customer_rows=provider_customer_rows,
adopting_rows=adopting_rows,
attacker=attacker,
victim=victim,
exr_output=exr_output)
def test_figure_4b(self):
r"""v3 example with ROV++v3
/44\
53 | 666
/ | \
/ | 87 \
54 |/ \ \
\ 33 99
\ /
22
"""
attack_types = [Subprefix_Hijack]
adopt_policies = [Non_Default_Policies.ROVPP_V3]
peer_rows = []
provider_customer_rows = [[44, 53],
[44, 33],
[44, 99],
[44, 666],
[53, 54],
[54, 22],
[33, 22],
[87, 33],
[87, 99]]
# Set adopting rows
bgp_ases = [54, 53, 22, 44, 87, 99, 666]
adopting_ases = [33]
adopting_rows = []
for bgp_as in bgp_ases:
adopting_rows.append([bgp_as, Policies.DEFAULT.value, False])
for adopting_as in adopting_ases:
adopting_rows.append([adopting_as, Policies.ROVPP_V3.value, True])
attacker = 666
victim = 99
exr_output = [{"asn": 44,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 99},
{"asn": 44,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 666},
{"asn": 666,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 44},
{"asn": 666,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": Conds.HIJACKED.value},
{"asn": 53,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 44},
{"asn": 53,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 44},
{"asn": 54,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 53},
{"asn": 54,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": 53},
{"asn": 22,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 33},
{"asn": 22,
"prefix": Attack.default_subprefix,
"origin": 99,
"received_from_asn": 33},
{"asn": 87,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 99},
{"asn": 33,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": 87},
{"asn": 33,
"prefix": Attack.default_subprefix,
"origin": 99,
"received_from_asn": 87},
{"asn": 99,
"prefix": Attack.default_prefix,
"origin": 99,
"received_from_asn": Conds.NOTHIJACKED.value },
{"asn": 99,
"prefix": Attack.default_subprefix,
"origin": 666,
"received_from_asn": Conds.NOTHIJACKED.value},
]
self._test_graph(attack_types=attack_types,
adopt_policies=adopt_policies,
peer_rows=peer_rows,
provider_customer_rows=provider_customer_rows,
adopting_rows=adopting_rows,
attacker=attacker,
victim=victim,
exr_output=exr_output)
| 37.67623
| 84
| 0.400631
| 732
| 9,193
| 4.755464
| 0.147541
| 0.099971
| 0.158288
| 0.103419
| 0.84085
| 0.840276
| 0.82821
| 0.827636
| 0.827636
| 0.809537
| 0
| 0.074995
| 0.501033
| 9,193
| 243
| 85
| 37.831276
| 0.683889
| 0.056456
| 0
| 0.880208
| 0
| 0
| 0.118548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010417
| false
| 0
| 0.026042
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
af0c78e9d353d52292636db22cb32086cfdd661f
| 13,463
|
py
|
Python
|
ppdpy/tests/test_expression_compiler.py
|
ericvoid/ppdpy
|
40ab60c485a09793f18aa530fa2b5f8435dc2bfc
|
[
"MIT"
] | 1
|
2020-05-16T21:29:20.000Z
|
2020-05-16T21:29:20.000Z
|
ppdpy/tests/test_expression_compiler.py
|
ericvoid/ppdpy
|
40ab60c485a09793f18aa530fa2b5f8435dc2bfc
|
[
"MIT"
] | null | null | null |
ppdpy/tests/test_expression_compiler.py
|
ericvoid/ppdpy
|
40ab60c485a09793f18aa530fa2b5f8435dc2bfc
|
[
"MIT"
] | 1
|
2020-05-16T22:32:01.000Z
|
2020-05-16T22:32:01.000Z
|
from unittest import TestCase
from ppdpy.expression_compiler import lex, compile, Id, Not, And, Or, \
evaluate as evalexpr
from ppdpy.exceptions import ExpressionSyntaxError
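# Grammar conventions verified below: 'not' binds tightest, then 'and', then 'or';
# 'and' chains associate to the left while 'or' chains associate to the right.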
class TestLex(TestCase):
def test_ids(self):
self.assertEqual(list(lex('foo')), ['foo'])
self.assertEqual(list(lex('foo bar')), ['foo', 'bar'])
self.assertEqual(list(lex('foo bar')), ['foo', 'bar'])
self.assertEqual(list(lex(' foo bar')), ['foo', 'bar'])
self.assertEqual(list(lex('foo bar ')), ['foo', 'bar'])
self.assertEqual(list(lex(' foo bar ')), ['foo', 'bar'])
def test_parens(self):
self.assertEqual(list(lex('()')), ['(', ')'])
self.assertEqual(list(lex(' ()')), ['(', ')'])
self.assertEqual(list(lex('( )')), ['(', ')'])
self.assertEqual(list(lex('() ')), ['(', ')'])
self.assertEqual(list(lex(' ( ) ')), ['(', ')'])
self.assertEqual(list(lex(' ( ) ')), ['(', ')'])
self.assertEqual(list(lex(' ( ) ')), ['(', ')'])
self.assertEqual(list(lex('(())')), ['(', '(', ')', ')'])
self.assertEqual(list(lex('( ())')), ['(', '(', ')', ')'])
self.assertEqual(list(lex('(( ))')), ['(', '(', ')', ')'])
self.assertEqual(list(lex('(()) ')), ['(', '(', ')', ')'])
self.assertEqual(list(lex(')()(')), [')', '(', ')', '('])
def test_expressions(self):
self.assertEqual(list(lex('a and b')), ['a', 'and', 'b'])
self.assertEqual(list(lex('a and b or c')), ['a', 'and', 'b', 'or', 'c'])
self.assertEqual(list(lex('(a)')), ['(', 'a', ')'])
self.assertEqual(list(lex('(a and b) or c')), ['(', 'a', 'and', 'b', ')', 'or', 'c'])
self.assertEqual(list(lex('(a and b)or c')), ['(', 'a', 'and', 'b', ')', 'or', 'c'])
self.assertEqual(list(lex('not (a and b) or c')), ['not', '(', 'a', 'and', 'b', ')', 'or', 'c'])
self.assertEqual(list(lex('not(a and b)or c')), ['not', '(', 'a', 'and', 'b', ')', 'or', 'c'])
self.assertEqual(list(lex('a and (b or c)')), ['a', 'and', '(', 'b', 'or', 'c', ')'])
self.assertEqual(list(lex('a and(b or c)')), ['a', 'and', '(', 'b', 'or', 'c', ')'])
self.assertEqual(list(lex('a and not (b or c)')), ['a', 'and', 'not', '(', 'b', 'or', 'c', ')'])
self.assertEqual(list(lex('a and not(b or c)')), ['a', 'and', 'not', '(', 'b', 'or', 'c', ')'])
self.assertEqual(list(lex(' a and (b or c) ')), ['a', 'and', '(', 'b', 'or', 'c', ')'])
class TestParse(TestCase):
def test_sanity(self):
"""
Just checks if dataclass comparison works as expected.
"""
self.assertEqual(Id('a'), Id('a'))
self.assertEqual(Not(Id('a')), Not(Id('a')))
self.assertEqual(And(Id('a'), Id('b')), And(Id('a'), Id('b')))
self.assertEqual(Or(Id('a'), Id('b')), Or(Id('a'), Id('b')))
self.assertEqual(And(Id('a'), Or(Id('b'), Id('C'))), And(Id('a'), Or(Id('b'), Id('C'))))
self.assertNotEqual(Id('a'), Id('b'))
self.assertNotEqual(Id('a'), Not(Id('a')))
self.assertNotEqual(And(Id('a'), Id('b')), Or(Id('a'), Id('b')))
self.assertNotEqual(And(Id('a'), Id('b')), And(Id('a'), Id('c')))
self.assertNotEqual(Or(Id('a'), Id('b')), Or(Id('a'), Id('c')))
self.assertNotEqual(And(Id('a'), Or(Id('b'), Id('C'))), Or(And(Id('a'), Id('b')), Id('C')))
def test_simple(self):
self.assertEqual(compile('a'), Id('a'))
self.assertEqual(compile('A'), Id('A'))
self.assertEqual(compile('not a'), Not(Id('a')))
self.assertEqual(compile('not A'), Not(Id('A')))
self.assertEqual(compile('a and b'), And(Id('a'), Id('b')))
self.assertEqual(compile('a and b and c'), And(And(Id('a'), Id('b')), Id('c')))
self.assertEqual(compile('a and b and c and d'), And(And(And(Id('a'), Id('b')), Id('c')), Id('d')))
self.assertEqual(compile('a or b'), Or(Id('a'), Id('b')))
self.assertEqual(compile('a or b or c'), Or(Id('a'), Or(Id('b'), Id('c'))))
self.assertEqual(compile('a or b or c or d'), Or(Id('a'), Or(Id('b'), Or(Id('c'), Id('d')))))
def test_case_sensitivity(self):
self.assertEqual(compile('NOT a'), Not(Id('a')))
self.assertEqual(compile('a AND b'), And(Id('a'), Id('b')))
self.assertEqual(compile('a OR b'), Or(Id('a'), Id('b')))
def test_precedence(self):
self.assertEqual(compile('a or b and c'), Or(Id('a'), And(Id('b'), Id('c'))))
self.assertEqual(compile('a and b or c'), Or(And(Id('a'), Id('b')), Id('c')))
self.assertEqual(compile('a and b or c and d'), Or(And(Id('a'), Id('b')), And(Id('c'), Id('d'))))
self.assertEqual(compile('not a or b and c'), Or(Not(Id('a')), And(Id('b'), Id('c'))))
self.assertEqual(compile('a or not b and c'), Or(Id('a'), And(Not(Id('b')), Id('c'))))
self.assertEqual(compile('a or b and not c'), Or(Id('a'), And(Id('b'), Not(Id('c')))))
def test_nots(self):
self.assertEqual(compile('not a and b'), And(Not(Id('a')), Id('b')))
self.assertEqual(compile('a and not b'), And(Id('a'), Not(Id('b'))))
self.assertEqual(compile('not a and not b'), And(Not(Id('a')), Not(Id('b'))))
self.assertEqual(compile('not a or b'), Or(Not(Id('a')), Id('b')))
self.assertEqual(compile('a or not b'), Or(Id('a'), Not(Id('b'))))
self.assertEqual(compile('not a or not b'), Or(Not(Id('a')), Not(Id('b'))))
def test_parens(self):
self.assertEqual(compile('(a)'), Id('a'))
self.assertEqual(compile('not (a)'), Not(Id('a')))
self.assertEqual(compile('(a and b)'), And(Id('a'), Id('b')))
self.assertEqual(compile('((a) and (b))'), And(Id('a'), Id('b')))
self.assertEqual(compile('(((a)) and ((b)))'), And(Id('a'), Id('b')))
self.assertEqual(compile('a or (b and c)'), Or(Id('a'), And(Id('b'), Id('c'))))
self.assertEqual(compile('(a and b) or c'), Or(And(Id('a'), Id('b')), Id('c')))
self.assertEqual(compile('a and (b or c)'), And(Id('a'), Or(Id('b'), Id('c'))))
self.assertEqual(compile('(a or b) and c'), And(Or(Id('a'), Id('b')), Id('c')))
self.assertEqual(compile('(a and b) or (c and d)'), Or(And(Id('a'), Id('b')), And(Id('c'), Id('d'))))
self.assertEqual(compile('(a and b) or (c and d) or (e and f)'), Or(And(Id('a'), Id('b')), Or(And(Id('c'), Id('d')), And(Id('e'), Id('f')))))
self.assertEqual(compile('not (a and b)'), Not(And(Id('a'), Id('b'))))
self.assertEqual(compile('a and (not b or c)'), And(Id('a'), Or(Not(Id('b')), Id('c'))))
self.assertEqual(compile('a and not (b or c)'), And(Id('a'), Not(Or(Id('b'), Id('c')))))
self.assertEqual(compile('not (a and b) or c'), Or(Not(And(Id('a'), Id('b'))), Id('c')))
def test_precedence_equivalence(self):
self.assertEqual(compile('a and b or c'), compile('(a and b) or c'))
self.assertEqual(compile('a or b and c'), compile('a or (b and c)'))
self.assertEqual(compile('a or not b'), compile('a or (not b)'))
self.assertEqual(compile('not a or b'), compile('(not a) or b'))
def test_errors(self):
with self.assertRaises(ExpressionSyntaxError):
compile('')
with self.assertRaises(ExpressionSyntaxError):
compile('and')
with self.assertRaises(ExpressionSyntaxError):
compile('or')
with self.assertRaises(ExpressionSyntaxError):
compile('not')
with self.assertRaises(ExpressionSyntaxError):
compile('()')
with self.assertRaises(ExpressionSyntaxError):
compile('a and')
with self.assertRaises(ExpressionSyntaxError):
compile('and a')
with self.assertRaises(ExpressionSyntaxError):
compile('a or')
with self.assertRaises(ExpressionSyntaxError):
compile('or a')
with self.assertRaises(ExpressionSyntaxError):
compile('a not')
with self.assertRaises(ExpressionSyntaxError):
compile('not and')
with self.assertRaises(ExpressionSyntaxError):
compile('not or')
with self.assertRaises(ExpressionSyntaxError):
compile('(a and b')
with self.assertRaises(ExpressionSyntaxError):
compile('a (and b')
with self.assertRaises(ExpressionSyntaxError):
compile('a and (b')
with self.assertRaises(ExpressionSyntaxError):
compile('a and b (')
with self.assertRaises(ExpressionSyntaxError):
compile(') a and b')
with self.assertRaises(ExpressionSyntaxError):
compile('a) and b')
with self.assertRaises(ExpressionSyntaxError):
compile('a and) b')
with self.assertRaises(ExpressionSyntaxError):
compile('a and b)')
with self.assertRaises(ExpressionSyntaxError):
compile('(a or b')
with self.assertRaises(ExpressionSyntaxError):
compile('a (or b')
with self.assertRaises(ExpressionSyntaxError):
compile('a or (b')
with self.assertRaises(ExpressionSyntaxError):
compile('a or b (')
with self.assertRaises(ExpressionSyntaxError):
compile(') a or b')
with self.assertRaises(ExpressionSyntaxError):
compile('a) or b')
with self.assertRaises(ExpressionSyntaxError):
compile('a or) b')
with self.assertRaises(ExpressionSyntaxError):
compile('a or b)')
with self.assertRaises(ExpressionSyntaxError):
compile('(a and b or c')
with self.assertRaises(ExpressionSyntaxError):
compile('((a and b) or c')
with self.assertRaises(ExpressionSyntaxError):
compile('(a and b) or c)')
class TestEval(TestCase):
def test_eval_simple(self):
self.assertEqual(evalexpr(Id('a'), {'a'}), True)
self.assertEqual(evalexpr(Id('a'), set()), False)
self.assertEqual(evalexpr(Not(Id('a')), {'a'}), False)
self.assertEqual(evalexpr(Not(Id('a')), set()), True)
self.assertEqual(evalexpr(Id('A'), {'a'}), False)
def test_eval_and(self):
expr = And(Id('a'), Id('b'))
self.assertEqual(evalexpr(expr, {'a', 'b'}), True)
self.assertEqual(evalexpr(expr, {'a'}), False)
self.assertEqual(evalexpr(expr, {'b'}), False)
self.assertEqual(evalexpr(expr, set()), False)
expr = And(Not(Id('a')), Id('b'))
self.assertEqual(evalexpr(expr, {'a', 'b'}), False)
self.assertEqual(evalexpr(expr, {'a'}), False)
self.assertEqual(evalexpr(expr, {'b'}), True)
self.assertEqual(evalexpr(expr, set()), False)
expr = Not(And(Id('a'), Id('b')))
self.assertEqual(evalexpr(expr, {'a', 'b'}), False)
self.assertEqual(evalexpr(expr, {'a'}), True)
self.assertEqual(evalexpr(expr, {'b'}), True)
self.assertEqual(evalexpr(expr, set()), True)
def test_eval_or(self):
expr = Or(Id('a'), Id('b'))
self.assertEqual(evalexpr(expr, {'a', 'b'}), True)
self.assertEqual(evalexpr(expr, {'a'}), True)
self.assertEqual(evalexpr(expr, {'b'}), True)
self.assertEqual(evalexpr(expr, set()), False)
expr = Or(Not(Id('a')), Id('b'))
self.assertEqual(evalexpr(expr, {'a', 'b'}), True)
self.assertEqual(evalexpr(expr, {'a'}), False)
self.assertEqual(evalexpr(expr, {'b'}), True)
self.assertEqual(evalexpr(expr, set()), True)
expr = Not(Or(Id('a'), Id('b')))
self.assertEqual(evalexpr(expr, {'a', 'b'}), False)
self.assertEqual(evalexpr(expr, {'a'}), False)
self.assertEqual(evalexpr(expr, {'b'}), False)
self.assertEqual(evalexpr(expr, set()), True)
def test_eval_and_or(self):
expr = And(Id('a'), Or(Id('b'), Id('c')))
self.assertEqual(evalexpr(expr, {'a', 'b', 'c'}), True)
self.assertEqual(evalexpr(expr, {'a', 'b'}), True)
self.assertEqual(evalexpr(expr, {'a', 'c'}), True)
self.assertEqual(evalexpr(expr, {'b', 'c'}), False)
self.assertEqual(evalexpr(expr, {'a'}), False)
self.assertEqual(evalexpr(expr, {'b'}), False)
self.assertEqual(evalexpr(expr, {'c'}), False)
self.assertEqual(evalexpr(expr, set()), False)
expr = Or(Id('a'), And(Id('b'), Id('c')))
self.assertEqual(evalexpr(expr, {'a', 'b', 'c'}), True)
self.assertEqual(evalexpr(expr, {'a', 'b'}), True)
self.assertEqual(evalexpr(expr, {'a', 'c'}), True)
self.assertEqual(evalexpr(expr, {'b', 'c'}), True)
self.assertEqual(evalexpr(expr, {'a'}), True)
self.assertEqual(evalexpr(expr, {'b'}), False)
self.assertEqual(evalexpr(expr, {'c'}), False)
self.assertEqual(evalexpr(expr, set()), False)
expr = Not(And(Id('a'), Or(Id('b'), Id('c'))))
self.assertEqual(evalexpr(expr, {'a', 'b', 'c'}), False)
self.assertEqual(evalexpr(expr, {'a', 'b'}), False)
self.assertEqual(evalexpr(expr, {'a', 'c'}), False)
self.assertEqual(evalexpr(expr, {'b', 'c'}), True)
self.assertEqual(evalexpr(expr, {'a'}), True)
self.assertEqual(evalexpr(expr, {'b'}), True)
self.assertEqual(evalexpr(expr, {'c'}), True)
self.assertEqual(evalexpr(expr, set()), True)
| 43.429032
| 149
| 0.537176
| 1,761
| 13,463
| 4.094265
| 0.034072
| 0.274619
| 0.169071
| 0.17975
| 0.931207
| 0.913454
| 0.826214
| 0.772677
| 0.736893
| 0.70638
| 0
| 0
| 0.222462
| 13,463
| 309
| 150
| 43.569579
| 0.688766
| 0.004011
| 0
| 0.450216
| 0
| 0
| 0.111775
| 0
| 0
| 0
| 0
| 0
| 0.731602
| 1
| 0.064935
| false
| 0
| 0.012987
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
af457c15982ddc0ce30528e437600a0b96df2cb6
| 90
|
py
|
Python
|
flowsaber/server/database/__init__.py
|
flowsaber/flowsaber
|
7d68d085bbd9165d2bc0e0acd7826e70569c5fa3
|
[
"MIT"
] | 31
|
2021-05-08T06:35:07.000Z
|
2022-03-05T05:58:24.000Z
|
flowsaber/server/database/__init__.py
|
flowsaber/flowsaber
|
7d68d085bbd9165d2bc0e0acd7826e70569c5fa3
|
[
"MIT"
] | 3
|
2021-05-10T12:36:57.000Z
|
2021-05-15T14:01:15.000Z
|
flowsaber/server/database/__init__.py
|
zhqu1148980644/flowsaber
|
7d68d085bbd9165d2bc0e0acd7826e70569c5fa3
|
[
"MIT"
] | 1
|
2021-03-09T06:18:17.000Z
|
2021-03-09T06:18:17.000Z
|
from flowsaber.server.database.db import *
from flowsaber.server.database.models import *
| 30
| 46
| 0.822222
| 12
| 90
| 6.166667
| 0.583333
| 0.351351
| 0.513514
| 0.72973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 90
| 2
| 47
| 45
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bb5df4b1656ec85e3f4dfafa446a43a046a44ee5
| 193
|
py
|
Python
|
exapi/response_handlers/hitbtc/market_data/__init__.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
exapi/response_handlers/hitbtc/market_data/__init__.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
exapi/response_handlers/hitbtc/market_data/__init__.py
|
astsu-dev/exapi
|
1ef39ccdd77e9ddb60ec6eaa16a2cc26e1ac3e12
|
[
"MIT"
] | null | null | null |
from exapi.response_handlers.hitbtc.market_data.handler import HitbtcMarketDataResponseHandler
from exapi.response_handlers.hitbtc.market_data.interface import IHitbtcMarketDataResponseHandler
| 64.333333
| 97
| 0.917098
| 20
| 193
| 8.65
| 0.6
| 0.104046
| 0.196532
| 0.289017
| 0.473988
| 0.473988
| 0.473988
| 0
| 0
| 0
| 0
| 0
| 0.041451
| 193
| 2
| 98
| 96.5
| 0.935135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bb892ca5814b1925e1a0611b31a7a18d7d5b2fd8
| 10,856
|
py
|
Python
|
dyno_world_state/test/world_state/test_visualizer.py
|
samiamlabs/dyno
|
38bff1ad5dc99293f57c2c6411176337684e763b
|
[
"BSD-3-Clause"
] | 15
|
2019-02-18T21:18:38.000Z
|
2021-09-20T12:11:13.000Z
|
dyno_world_state/test/world_state/test_visualizer.py
|
samiamlabs/dyno
|
38bff1ad5dc99293f57c2c6411176337684e763b
|
[
"BSD-3-Clause"
] | null | null | null |
dyno_world_state/test/world_state/test_visualizer.py
|
samiamlabs/dyno
|
38bff1ad5dc99293f57c2c6411176337684e763b
|
[
"BSD-3-Clause"
] | 5
|
2020-02-20T07:34:32.000Z
|
2021-06-08T08:09:58.000Z
|
# -*- coding: utf-8 -*-
import pytest
import rospy
import time
import os
from geometry_msgs.msg import *
from std_srvs.srv import *
from visualization_msgs.msg import Marker, MarkerArray
from dyno_msgs.msg import *
from dyno_msgs.srv import *
NAME = 'world_state_test'
@pytest.fixture
def node():
rospy.init_node(NAME, anonymous=True)
clear_robots()
clear_locations()
clear_objects()
clear_objects_on_robots()
@pytest.fixture
def waiter():
class Waiter(object):
def __init__(self):
self.received = []
self.condition = lambda x: False
@property
def success(self):
return True in self.received
def callback(self, data):
self.received.append(self.condition(data))
def wait(self, timeout):
timeout_t = time.time() + timeout
while not rospy.is_shutdown() and not self.success and time.time() < timeout_t:
time.sleep(0.1)
def reset(self):
self.received = []
return Waiter()
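# Each helper below wraps a ROS service call; failures are printed rather than raised.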
def clear_robots():
service_name = '/world_state/clear_robots'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, Empty)(EmptyRequest())
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def set_robots(robots):
request = SetRobotsRequest(robots=robots)
service_name = '/world_state/set_robots'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, SetRobots)(request)
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def clear_objects():
service_name = '/world_state/clear_objects'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, Empty)(EmptyRequest())
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def set_objects(objects):
request = SetObjectsRequest(objects=objects)
service_name = '/world_state/set_objects'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, SetObjects)(request)
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def clear_locations():
service_name = '/world_state/clear_locations'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, Empty)(EmptyRequest())
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def set_locations(locations):
request = SetLocationsRequest(locations=locations)
service_name = '/world_state/set_locations'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, SetLocations)(request)
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def get_robots_at_locations():
service_name = '/world_state/get_robots_at_locations'
rospy.wait_for_service(service_name)
try:
return rospy.ServiceProxy(service_name, GetRobotsAtLocations)().robots_at_locations
except rospy.ServiceException, e:
print "Service call failed: %s" % e
return []
def get_objects_at_locations():
service_name = '/world_state/get_objects_at_locations'
rospy.wait_for_service(service_name)
try:
return rospy.ServiceProxy(service_name, GetObjectsAtLocations)().objects_at_locations
except rospy.ServiceException, e:
print "Service call failed: %s" % e
return []
def clear_objects_on_robots():
service_name = '/world_state/clear_objects_on_robots'
rospy.wait_for_service(service_name)
try:
rospy.ServiceProxy(service_name, Empty)()
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def get_objects_on_robots():
service_name = '/world_state/get_objects_on_robots'
rospy.wait_for_service(service_name)
try:
return rospy.ServiceProxy(service_name, GetObjectsOnRobots)().objects_on_robots
except rospy.ServiceException, e:
print "Service call failed: %s" % e
return []
def add_object_on_robot(object_name, robot_name):
object_on_robot = ObjectOnRobot(robot_name=robot_name, object_name=object_name)
request = AddObjectOnRobotRequest(object_on_robot=object_on_robot)
service_name = '/world_state/add_object_on_robot'
rospy.wait_for_service(service_name)
try:
return rospy.ServiceProxy(service_name, AddObjectOnRobot)(request).success
except rospy.ServiceException, e:
print "Service call failed: %s" % e
return SetObjectOnRobotResponse()
def remove_object_on_robot(object_name, robot_name):
object_on_robot = ObjectOnRobot(robot_name=robot_name, object_name=object_name)
request = RemoveObjectOnRobotRequest(object_on_robot=object_on_robot)
service_name = '/world_state/remove_object_on_robot'
rospy.wait_for_service(service_name)
try:
return rospy.ServiceProxy(service_name, RemoveObjectOnRobot)(request).success
except rospy.ServiceException, e:
print "Service call failed: %s" % e
return RemoveObjectOnRobotResponse()
def test_it_publishes_object_marker(node, waiter):
objects = []
pose = Pose(position=Point(x=0.1, y=0.2, z=0.3))
objects.append(Object(name='red_box', type='parcel', pose=pose))
set_objects(objects)
waiter.condition = lambda data: object_marker_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def object_marker_condition(data):
success = False
for marker in data.markers:
if marker.ns == 'objects':
success = True
success = success and marker.pose.position.x == 0.1
success = success and marker.pose.position.y == 0.2
success = success and marker.pose.position.z == 0.3
success = success and marker.header.frame_id == 'map'
return success
def test_it_publishes_object_marker_label(node, waiter):
objects = []
pose = Pose(position=Point(x=0.1, y=0.2, z=0.3))
objects.append(Object(name='red_box', type='parcel', pose=pose))
set_objects(objects)
waiter.condition = lambda data: object_marker_label_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def object_marker_label_condition(data):
success = False
for marker in data.markers:
if marker.ns == 'object_labels':
success = True
success = success and marker.pose.position.x == 0.1
success = success and marker.pose.position.y == 0.2
success = success and marker.pose.position.z == 0.3 + 0.25
success = success and marker.header.frame_id == 'map'
return success
def test_it_publishes_object_markers(node, waiter):
objects = []
pose = Pose(position=Point(x=0.1, y=0.2, z=0.3))
objects.append(Object(name='red_box', type='parcel', pose=pose))
objects.append(Object(name='blue_box', type='parcel', pose=pose))
objects.append(Object(name='green_box', type='parcel', pose=pose))
set_objects(objects)
waiter.condition = lambda data: object_markers_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def object_markers_condition(data):
success = False
marker_counter = 0
for marker in data.markers:
if marker.ns == 'objects':
success = True
marker_counter += 1
success = success and marker_counter == 3
return success
def test_it_publishes_location_marker(node, waiter):
locations = []
pose = Pose(position=Point(x=0.2, y=0.3, z=0.4))
locations.append(Location(name='start', pose=pose))
set_locations(locations)
waiter.condition = lambda data: location_marker_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def location_marker_condition(data):
success = False
for marker in data.markers:
if marker.ns == 'locations':
success = True
success = success and marker.pose.position.x == 0.2
success = success and marker.pose.position.y == 0.3
success = success and marker.pose.position.z == 0.0
success = success and marker.header.frame_id == 'map'
return success
def test_it_publishes_location_markers(node, waiter):
locations = []
pose = Pose(position=Point(x=0.2, y=0.3, z=0.4))
locations.append(Location(name='start', pose=pose))
locations.append(Location(name='above', pose=pose))
locations.append(Location(name='below', pose=pose))
set_locations(locations)
waiter.condition = lambda data: location_markers_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def location_markers_condition(data):
success = False
marker_counter = 0
for marker in data.markers:
if marker.ns == 'locations':
success = True
marker_counter += 1
success = success and marker_counter == 3
return success
def test_it_publishes_robot_marker(node, waiter):
robots = []
pose = Pose(position=Point(x=0.3, y=0.4, z=0.5))
robots.append(Robot(name='blue', pose=pose))
set_robots(robots)
waiter.condition = lambda data: robot_marker_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def robot_marker_condition(data):
success = False
for marker in data.markers:
if marker.ns == 'robots':
success = True
success = success and marker.pose.position.x == 0.3
success = success and marker.pose.position.y == 0.4
success = success and marker.pose.position.z == 0.5
success = success and marker.header.frame_id == 'map'
return success
def test_it_publishes_robot_markers(node, waiter):
robots = []
pose = Pose(position=Point(x=0.3, y=0.4, z=0.5))
robots.append(Robot(name='blue', pose=pose))
robots.append(Robot(name='red', pose=pose))
robots.append(Robot(name='green', pose=pose))
set_robots(robots)
waiter.condition = lambda data: robot_markers_condition(data)
rospy.Subscriber('/world_state/markers', MarkerArray, waiter.callback)
waiter.wait(1.0)
assert waiter.success
def robot_markers_condition(data):
success = False
marker_counter = 0
for marker in data.markers:
if marker.ns == 'robots':
success = True
marker_counter += 1
success = success and marker_counter == 3
return success
| 29.824176
| 93
| 0.684783
| 1,386
| 10,856
| 5.173882
| 0.095238
| 0.055222
| 0.045043
| 0.06094
| 0.834891
| 0.814531
| 0.783712
| 0.760284
| 0.739227
| 0.729884
| 0
| 0.011072
| 0.209654
| 10,856
| 363
| 94
| 29.906336
| 0.824709
| 0.001934
| 0
| 0.595506
| 0
| 0
| 0.08908
| 0.033416
| 0
| 0
| 0
| 0
| 0.026217
| 0
| null | null | 0
| 0.033708
| null | null | 0.044944
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bbcc8be14899a5e62dc8f9b44cf753d57ea4c203
| 13,675
|
py
|
Python
|
caoliu.py
|
wangfengfighting/Caoliu-master
|
9025b1871ec7c2322463d97650ccd5b47f784cbb
|
[
"Apache-2.0"
] | null | null | null |
caoliu.py
|
wangfengfighting/Caoliu-master
|
9025b1871ec7c2322463d97650ccd5b47f784cbb
|
[
"Apache-2.0"
] | null | null | null |
caoliu.py
|
wangfengfighting/Caoliu-master
|
9025b1871ec7c2322463d97650ccd5b47f784cbb
|
[
"Apache-2.0"
] | null | null | null |
#coding=utf-8
#-*- coding: UTF-8 -*-
import urllib
# import socket
import time
import urllib2
import os
from bs4 import BeautifulSoup
#---------------------V1.3 image-board edition----------------------#
# Filters out works by specified authors  http://www.cl529.com/index.php http://www.cl529.com/thread0806.php?fid=16
_img_pre = ''
_headers = {'User-Agent':'Mozilla/5.0'};
_data = ''
_pageStart = 1
_pageEnd = 2
_localUrl = 'http://www.cl529.com/'
_encode = 'gbk'
def findUrl():
global _data
global _headers
global _pageStart
global _pageEnd
global _localUrl
global _encode
for pageNo in range(_pageStart, _pageEnd):
url = _localUrl + 'thread0806.php?fid=16&page=' + pageNo.__str__()
print 'Search Url:', url
# Set the request parameters
req = urllib2.Request(url, _data, _headers)
# Fetch the page source
data = urllib2.urlopen(req)
soup = BeautifulSoup(data, from_encoding=_encode)
trs = soup.findAll('tr', {'class':'tr3 t_one'})
for tr in trs:
author = tr.find('a', {'class':'bl'}).string
url = tr.find('a', {'title':'打開新窗口'})['href'].__str__()
# Remove authors that should be filtered out of the list
if author.find('第六天') == -1 and url.find('1502') != -1:
print '-----------------------------author:[' + author + ']-----------------------------'
downUrl = _localUrl + url
print 'Accept down url:[' + downUrl + ']'
date(downUrl, author)
# downs = soup.findAll('a', {'title':'打开新窗口'})
# for down in downs:
# downUrl = _localUrl + down['href'].__str__()
# if downUrl.find('1308') == -1:
# print 'Except url:', downUrl
# continue
# print '--------------------------------------------------------------------'
# print 'Accept down url:', downUrl
# date(downUrl)
def date(url, author):
global _data
global _headers
global _encode
# Set a timeout; some images stall during download and need to be skipped
#socket.setdefaulttimeout(10)
# Set the request parameters
req = urllib2.Request(url, _data, _headers)
# Fetch the page source
data = urllib2.urlopen(req)
time.sleep(3)
soup = BeautifulSoup(data, from_encoding = _encode)
# Create a folder named after the trailing segment of the URL
url = url[-12:]
title = soup.title.string.replace(' 草榴社區 - powered by phpwind.net','') + '+' + url
title = title.replace('/','-').replace(':','-')
title = '[' + author + ']' + title
# print title
new_path = os.path.join(os.path.abspath("./down/"), title).decode('utf-8')
if not os.path.isdir(new_path):
try:
os.makedirs(new_path)
except:
print '....................... Except! :( .......................'
imgs = soup.findAll('img', {'style':'cursor:pointer'})
down(imgs, new_path)
else:
print ':::::::This url:[[' + url + ']] is downed!:::::::'
def down(imgs, new_path):
global _img_pre
# Walk the list of image URLs and save each image
i = 0
j = 0
for img in imgs:
# Guards against malformed image links
img = img['src'].__str__()
if len(img) < 100:
# The scraped value is an <img src=...> attribute, so it must be fetched again as an http:// URL
# print '--------------------------------------------------------------------'
print "Image url:", img
# If the download times out, report it and move on
try:
if img[-30:] == _img_pre:
print "[Repeat IMGURL]:" + img
# print '--------------------------------------------------------------------'
continue
i += 1
imgName = i.__str__() + img[-7:]
local = os.path.join(new_path, imgName)
# Save helper: first argument is the URL, second the local file name (built from the URL's trailing characters)
class AppURLopener(urllib.FancyURLopener):
version = "Mozilla/5.0"
urllib._urlopener = AppURLopener()
urllib.urlretrieve(img, local)
j += 1
print 'Success download image[ ' + imgName + ' ]to ' + new_path[-11:]
# print '--------------------------------------------------------------------'
_img_pre = img[-30:]
except:
print 'Time out or download fail!'
i -= 1
print 'Success down NUM[' + j.__str__() + ']'
def main():
findUrl()
print '***************************【END】***************************'
#date()
if __name__ == '__main__':
main()
# #coding=utf-8
# #-*- coding: UTF-8 -*-
# import urllib
# # import socket
# import time
# import urllib2
# import os
# from bs4 import BeautifulSoup
#
# #---------------------V1.2 download-optimised edition----------------------#
# # Filters out works by specified authors
#
# _img_pre = ''
# _headers = {'User-Agent':'Mozilla/5.0'};
# _data = ''
# _pageStart = 1
# _pageEnd = 2
# _localUrl = 'http://www.t66y.com/'
# _encode = 'gbk'
#
# def findUrl():
# global _data
# global _headers
# global _pageStart
# global _pageEnd
# global _localUrl
# global _encode
#
# for pageNo in range(_pageStart, _pageEnd):
# url = _localUrl + 'thread0806.php?fid=2&page=' + pageNo.__str__()
# print 'Search Url:', url
# # Set the request parameters
# req = urllib2.Request(url, _data, _headers)
# # Fetch the page source
# data = urllib2.urlopen(req)
# soup = BeautifulSoup(data, from_encoding=_encode)
#
#
# trs = soup.findAll('tr', {'class':'tr3 t_one'})
# for tr in trs:
# author = tr.find('a', {'class':'bl'}).string
# url = tr.find('a', {'title':'打開新窗口'})['href'].__str__()
# # Remove authors that should be filtered out of the list
# if author.find('第六天') == -1 and url.find('1412') != -1:
# print '-----------------------------author:[' + author + ']-----------------------------'
# downUrl = _localUrl + url
# print 'Accept down url:[' + downUrl + ']'
# date(downUrl, author)
#
#
# # downs = soup.findAll('a', {'title':'打开新窗口'})
# # for down in downs:
# # downUrl = _localUrl + down['href'].__str__()
# # if downUrl.find('1308') == -1:
# # print 'Except url:', downUrl
# # continue
# # print '--------------------------------------------------------------------'
# # print 'Accept down url:', downUrl
# # date(downUrl)
#
# def date(url, author):
# global _data
# global _headers
# global _encode
# # Set a timeout; some images stall during download and need to be skipped
# #socket.setdefaulttimeout(10)
# # Set the request parameters
# req = urllib2.Request(url, _data, _headers)
# # Fetch the page source
# data = urllib2.urlopen(req)
# time.sleep(3)
# soup = BeautifulSoup(data, from_encoding = _encode)
# # Create a folder named after the trailing segment of the URL
# url = url[-12:]
# title = soup.title.string.replace(' 草榴社區 - powered by phpwind.net','') + '+' + url
# title = title.replace('/','-').replace(':','-')
# title = '[' + author + ']' + title
# # print title
# new_path = os.path.join(os.path.abspath("./down/"), title).decode('utf-8')
# if not os.path.isdir(new_path):
# try:
# os.makedirs(new_path)
# except:
# print '....................... Except! :( .......................'
# imgs = soup.findAll('img', {'style':'cursor:pointer'})
# down(imgs, new_path)
# else:
# print ':::::::This url:[[' + url + ']] is downed!:::::::'
#
# def down(imgs, new_path):
# global _img_pre
# # Walk the list of image URLs and save each image
# i = 0
# j = 0
# for img in imgs:
# # Guards against malformed image links
# img = img['src'].__str__()
# if len(img) < 100:
# # The scraped value is an <img src=...> attribute, so it must be fetched again as an http:// URL
# # print '--------------------------------------------------------------------'
# print "Image url:", img
# # If the download times out, report it and move on
# try:
# if img[-30:] == _img_pre:
# print "[Repeat IMGURL]:" + img
# # print '--------------------------------------------------------------------'
# continue
# i += 1
# imgName = i.__str__() + img[-7:]
# local = os.path.join(new_path, imgName)
# # Save helper: first argument is the URL, second the local file name (built from the URL's trailing characters)
# class AppURLopener(urllib.FancyURLopener):
# version = "Mozilla/5.0"
# urllib._urlopener = AppURLopener()
# urllib.urlretrieve(img, local)
# j += 1
# print 'Success download image[ ' + imgName + ' ]to ' + new_path[-11:]
# # print '--------------------------------------------------------------------'
# _img_pre = img[-30:]
# except:
# print 'Time out or download fail!'
# i -= 1
# print 'Success down NUM[' + j.__str__() + ']'
#
# def main():
# findUrl()
# print '***************************【END】***************************'
# #date()
# if __name__ == '__main__':
# main()
#---------------------V1.0----------------------#
# #coding=utf-8
# #-*- coding: UTF-8 -*-
# import urllib
# import socket
# import urllib2
# import os
# from bs4 import BeautifulSoup
#
# _img_pre = ''
# _headers = {'User-Agent':'Mozilla/5.0'};
# _data = ''
# _pageStart = 1
# _pageEnd = 2
# _localUrl = 'http://www.t66y.com/'
#
# def findUrl():
# global _data
# global _headers
# global _pageStart
# global _pageEnd
# global _localUrl
#
# for pageNo in range(_pageStart, _pageEnd):
# url = _localUrl + 'thread0806.php?fid=2&page=' + pageNo.__str__()
# print 'Search Url:', url
# # Set the request parameters
# req = urllib2.Request(url, _data, _headers)
# # Fetch the page source
# data = urllib2.urlopen(req)
# soup = BeautifulSoup(data, from_encoding='gb2312')
# downs = soup.findAll('a', {'title':'打开新窗口'})
# for down in downs:
# downUrl = _localUrl + down['href'].__str__()
# if downUrl.find('1308') == -1:
# print 'Except url:', downUrl
# continue
# print '--------------------------------------------------------------------'
# print 'Accept down url:', downUrl
# date(downUrl)
#
# def date(url):
# global _data
# global _headers
# # The quoted string is the target URL
# print url
# # Create a folder named after the trailing segment of the URL
# new_path = os.path.join(os.path.abspath("./"), url[-11:])
# if not os.path.isdir(new_path):
# os.makedirs(new_path)
# # Set a timeout; some images stall during download and need to be skipped
# #socket.setdefaulttimeout(10)
# # Set the request parameters
# req = urllib2.Request(url, _data, _headers)
# # Fetch the page source
# data = urllib2.urlopen(req)
# soup = BeautifulSoup(data, from_encoding='gb2312')
# imgs = soup.findAll('img', {'style':'cursor:pointer'})
# down(imgs, new_path)
#
# def down(imgs, new_path):
# global _img_pre
# # Walk the list of image URLs and save each image
# i = 0
# j = 0
# for img in imgs:
# # Guards against malformed image links
# img = img['src'].__str__()
# if len(img) < 100:
# # The scraped value is an <img src=...> attribute, so it must be fetched again as an http:// URL
# print '--------------------------------------------------------------------'
# print "Image url:", img
# # If the download times out, report it and move on
# try:
# if img[-30:] == _img_pre:
# print "图片地址重复,不必下载"
# print '--------------------------------------------------------------------'
# continue
# i += 1
# imgName = i.__str__() + img[-7:]
# local = os.path.join(new_path, imgName)
# # Save helper: first argument is the URL, second the local file name (built from the URL's trailing characters)
# class AppURLopener(urllib.FancyURLopener):
# version = "Mozilla/5.0"
# urllib._urlopener = AppURLopener()
# urllib.urlretrieve(img, local)
# j += 1
# print 'Success download image[ ' + imgName + ' ]to ' + new_path[-11:]
# print '--------------------------------------------------------------------'
# _img_pre = img[-30:]
# except:
# print 'Time out or download fail!'
# i -= 1
# print 'Success down NUM[' + j.__str__() + ']'
#
# def main():
# findUrl()
# #date()
# if __name__ == '__main__':
# main()
#---------------------V0.1----------------------#
# data = urllib2.urlopen(img)
# jpg = data.read()
# fp = open(local, 'wb')
# fp.write(jpg)
# fp.close()
# def date():
# global _data
# global _headers
# # First page number
# pageUp = 947597
# # Last page number
# pageDown = 947598
# for pageNo in range(pageUp, pageDown):
# # The quoted string is the target URL
# url = 'http://1024go.tk/htm_data/2/1308/' + pageNo.__str__() + '.html'
# print url
# # Create a folder named after the trailing segment of the URL
# new_path = os.path.join(os.path.abspath("./"), pageNo.__str__() + '.html')
# if not os.path.isdir(new_path):
# os.makedirs(new_path)
# # Set a timeout; some images stall during download and need to be skipped
# #socket.setdefaulttimeout(10)
# # Set the request parameters
# req = urllib2.Request(url, _data, _headers)
# # Fetch the page source
# data = urllib2.urlopen(req)
# soup = BeautifulSoup(data, from_encoding='gb2312')
# imgs = soup.findAll('img', {'style':'cursor:pointer'})
# down(imgs, new_path)
| 33.682266
| 107
| 0.448775
| 1,264
| 13,675
| 4.66693
| 0.148734
| 0.029666
| 0.024411
| 0.027293
| 0.936938
| 0.933887
| 0.933887
| 0.933887
| 0.926937
| 0.926937
| 0
| 0.023171
| 0.318318
| 13,675
| 405
| 108
| 33.765432
| 0.609633
| 0.710859
| 0
| 0.183908
| 0
| 0
| 0.156988
| 0.055293
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.057471
| null | null | 0.126437
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5a93eb76d54f376d5b6642ad17fbeea17183e2c
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_viktor/na_viktor_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_viktor/na_viktor_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_viktor/na_viktor_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
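# Matchup placeholders: one empty Ratings subclass per opposing mid-lane champion.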
class NA_Viktor_Mid_Aatrox(Ratings):
pass
class NA_Viktor_Mid_Ahri(Ratings):
pass
class NA_Viktor_Mid_Akali(Ratings):
pass
class NA_Viktor_Mid_Alistar(Ratings):
pass
class NA_Viktor_Mid_Amumu(Ratings):
pass
class NA_Viktor_Mid_Anivia(Ratings):
pass
class NA_Viktor_Mid_Annie(Ratings):
pass
class NA_Viktor_Mid_Ashe(Ratings):
pass
class NA_Viktor_Mid_AurelionSol(Ratings):
pass
class NA_Viktor_Mid_Azir(Ratings):
pass
class NA_Viktor_Mid_Bard(Ratings):
pass
class NA_Viktor_Mid_Blitzcrank(Ratings):
pass
class NA_Viktor_Mid_Brand(Ratings):
pass
class NA_Viktor_Mid_Braum(Ratings):
pass
class NA_Viktor_Mid_Caitlyn(Ratings):
pass
class NA_Viktor_Mid_Camille(Ratings):
pass
class NA_Viktor_Mid_Cassiopeia(Ratings):
pass
class NA_Viktor_Mid_Chogath(Ratings):
pass
class NA_Viktor_Mid_Corki(Ratings):
pass
class NA_Viktor_Mid_Darius(Ratings):
pass
class NA_Viktor_Mid_Diana(Ratings):
pass
class NA_Viktor_Mid_Draven(Ratings):
pass
class NA_Viktor_Mid_DrMundo(Ratings):
pass
class NA_Viktor_Mid_Ekko(Ratings):
pass
class NA_Viktor_Mid_Elise(Ratings):
pass
class NA_Viktor_Mid_Evelynn(Ratings):
pass
class NA_Viktor_Mid_Ezreal(Ratings):
pass
class NA_Viktor_Mid_Fiddlesticks(Ratings):
pass
class NA_Viktor_Mid_Fiora(Ratings):
pass
class NA_Viktor_Mid_Fizz(Ratings):
pass
class NA_Viktor_Mid_Galio(Ratings):
pass
class NA_Viktor_Mid_Gangplank(Ratings):
pass
class NA_Viktor_Mid_Garen(Ratings):
pass
class NA_Viktor_Mid_Gnar(Ratings):
pass
class NA_Viktor_Mid_Gragas(Ratings):
pass
class NA_Viktor_Mid_Graves(Ratings):
pass
class NA_Viktor_Mid_Hecarim(Ratings):
pass
class NA_Viktor_Mid_Heimerdinger(Ratings):
pass
class NA_Viktor_Mid_Illaoi(Ratings):
pass
class NA_Viktor_Mid_Irelia(Ratings):
pass
class NA_Viktor_Mid_Ivern(Ratings):
pass
class NA_Viktor_Mid_Janna(Ratings):
pass
class NA_Viktor_Mid_JarvanIV(Ratings):
pass
class NA_Viktor_Mid_Jax(Ratings):
pass
class NA_Viktor_Mid_Jayce(Ratings):
pass
class NA_Viktor_Mid_Jhin(Ratings):
pass
class NA_Viktor_Mid_Jinx(Ratings):
pass
class NA_Viktor_Mid_Kalista(Ratings):
pass
class NA_Viktor_Mid_Karma(Ratings):
pass
class NA_Viktor_Mid_Karthus(Ratings):
pass
class NA_Viktor_Mid_Kassadin(Ratings):
pass
class NA_Viktor_Mid_Katarina(Ratings):
pass
class NA_Viktor_Mid_Kayle(Ratings):
pass
class NA_Viktor_Mid_Kayn(Ratings):
pass
class NA_Viktor_Mid_Kennen(Ratings):
pass
class NA_Viktor_Mid_Khazix(Ratings):
pass
class NA_Viktor_Mid_Kindred(Ratings):
pass
class NA_Viktor_Mid_Kled(Ratings):
pass
class NA_Viktor_Mid_KogMaw(Ratings):
pass
class NA_Viktor_Mid_Leblanc(Ratings):
pass
class NA_Viktor_Mid_LeeSin(Ratings):
pass
class NA_Viktor_Mid_Leona(Ratings):
pass
class NA_Viktor_Mid_Lissandra(Ratings):
pass
class NA_Viktor_Mid_Lucian(Ratings):
pass
class NA_Viktor_Mid_Lulu(Ratings):
pass
class NA_Viktor_Mid_Lux(Ratings):
pass
class NA_Viktor_Mid_Malphite(Ratings):
pass
class NA_Viktor_Mid_Malzahar(Ratings):
pass
class NA_Viktor_Mid_Maokai(Ratings):
pass
class NA_Viktor_Mid_MasterYi(Ratings):
pass
class NA_Viktor_Mid_MissFortune(Ratings):
pass
class NA_Viktor_Mid_MonkeyKing(Ratings):
pass
class NA_Viktor_Mid_Mordekaiser(Ratings):
pass
class NA_Viktor_Mid_Morgana(Ratings):
pass
class NA_Viktor_Mid_Nami(Ratings):
pass
class NA_Viktor_Mid_Nasus(Ratings):
pass
class NA_Viktor_Mid_Nautilus(Ratings):
pass
class NA_Viktor_Mid_Nidalee(Ratings):
pass
class NA_Viktor_Mid_Nocturne(Ratings):
pass
class NA_Viktor_Mid_Nunu(Ratings):
pass
class NA_Viktor_Mid_Olaf(Ratings):
pass
class NA_Viktor_Mid_Orianna(Ratings):
pass
class NA_Viktor_Mid_Ornn(Ratings):
pass
class NA_Viktor_Mid_Pantheon(Ratings):
pass
class NA_Viktor_Mid_Poppy(Ratings):
pass
class NA_Viktor_Mid_Quinn(Ratings):
pass
class NA_Viktor_Mid_Rakan(Ratings):
pass
class NA_Viktor_Mid_Rammus(Ratings):
pass
class NA_Viktor_Mid_RekSai(Ratings):
pass
class NA_Viktor_Mid_Renekton(Ratings):
pass
class NA_Viktor_Mid_Rengar(Ratings):
pass
class NA_Viktor_Mid_Riven(Ratings):
pass
class NA_Viktor_Mid_Rumble(Ratings):
pass
class NA_Viktor_Mid_Ryze(Ratings):
pass
class NA_Viktor_Mid_Sejuani(Ratings):
pass
class NA_Viktor_Mid_Shaco(Ratings):
pass
class NA_Viktor_Mid_Shen(Ratings):
pass
class NA_Viktor_Mid_Shyvana(Ratings):
pass
class NA_Viktor_Mid_Singed(Ratings):
pass
class NA_Viktor_Mid_Sion(Ratings):
pass
class NA_Viktor_Mid_Sivir(Ratings):
pass
class NA_Viktor_Mid_Skarner(Ratings):
pass
class NA_Viktor_Mid_Sona(Ratings):
pass
class NA_Viktor_Mid_Soraka(Ratings):
pass
class NA_Viktor_Mid_Swain(Ratings):
pass
class NA_Viktor_Mid_Syndra(Ratings):
pass
class NA_Viktor_Mid_TahmKench(Ratings):
pass
class NA_Viktor_Mid_Taliyah(Ratings):
pass
class NA_Viktor_Mid_Talon(Ratings):
pass
class NA_Viktor_Mid_Taric(Ratings):
pass
class NA_Viktor_Mid_Teemo(Ratings):
pass
class NA_Viktor_Mid_Thresh(Ratings):
pass
class NA_Viktor_Mid_Tristana(Ratings):
pass
class NA_Viktor_Mid_Trundle(Ratings):
pass
class NA_Viktor_Mid_Tryndamere(Ratings):
pass
class NA_Viktor_Mid_TwistedFate(Ratings):
pass
class NA_Viktor_Mid_Twitch(Ratings):
pass
class NA_Viktor_Mid_Udyr(Ratings):
pass
class NA_Viktor_Mid_Urgot(Ratings):
pass
class NA_Viktor_Mid_Varus(Ratings):
pass
class NA_Viktor_Mid_Vayne(Ratings):
pass
class NA_Viktor_Mid_Veigar(Ratings):
pass
class NA_Viktor_Mid_Velkoz(Ratings):
pass
class NA_Viktor_Mid_Vi(Ratings):
pass
class NA_Viktor_Mid_Viktor(Ratings):
pass
class NA_Viktor_Mid_Vladimir(Ratings):
pass
class NA_Viktor_Mid_Volibear(Ratings):
pass
class NA_Viktor_Mid_Warwick(Ratings):
pass
class NA_Viktor_Mid_Xayah(Ratings):
pass
class NA_Viktor_Mid_Xerath(Ratings):
pass
class NA_Viktor_Mid_XinZhao(Ratings):
pass
class NA_Viktor_Mid_Yasuo(Ratings):
pass
class NA_Viktor_Mid_Yorick(Ratings):
pass
class NA_Viktor_Mid_Zac(Ratings):
pass
class NA_Viktor_Mid_Zed(Ratings):
pass
class NA_Viktor_Mid_Ziggs(Ratings):
pass
class NA_Viktor_Mid_Zilean(Ratings):
pass
class NA_Viktor_Mid_Zyra(Ratings):
pass
| 15.695444 | 46 | 0.766692 | 972 | 6,545 | 4.736626 | 0.151235 | 0.209818 | 0.389661 | 0.479583 | 0.803432 | 0.803432 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169748 | 6,545 | 416 | 47 | 15.733173 | 0.847258 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
3c89beb91c7570d6467e552b352c3b376c8c2fa3 | 300 | py | Python | dataclasses_json/__init__.py | obi1kenobi/dataclasses-json | 31903c3a89e8a06eb26106c8cca054ca55a07e70 | ["MIT"] | 1 | 2020-01-10T12:16:17.000Z | 2020-01-10T12:16:17.000Z | dataclasses_json/__init__.py | obi1kenobi/dataclasses-json | 31903c3a89e8a06eb26106c8cca054ca55a07e70 | ["MIT"] | null | null | null | dataclasses_json/__init__.py | obi1kenobi/dataclasses-json | 31903c3a89e8a06eb26106c8cca054ca55a07e70 | ["MIT"] | null | null | null |
# flake8: noqa
from dataclasses_json.api import (DataClassJsonMixin,
LetterCase,
config,
dataclass_json,
Undefined)
from dataclasses_json.undefined import CatchAll
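# Illustrative usage (added sketch, not part of this __init__.py): the names
# re-exported above form the package's public API. Assuming the documented
# dataclasses-json behavior, a decorated dataclass gains JSON round-tripping:
from dataclasses import dataclass
from dataclasses_json import dataclass_json

@dataclass_json
@dataclass
class Person:
    name: str

# Person("alice").to_json()            -> '{"name": "alice"}'
# Person.from_json('{"name": "alice"}') -> Person(name='alice')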
| 37.5 | 53 | 0.463333 | 19 | 300 | 7.157895 | 0.684211 | 0.220588 | 0.279412 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006711 | 0.503333 | 300 | 7 | 54 | 42.857143 | 0.90604 | 0.04 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
b1ed6083a4686d1da67c8867a3afd4d89b745c08 | 87,431 | py | Python | code/training.py | MFRIbrahim/Heart-Arrhythmia-Modelling-with-Invertible-Neural-Networks | d61471dd28b42b4320de64df1515719781211aef | ["MIT"] | null | null | null | code/training.py | MFRIbrahim/Heart-Arrhythmia-Modelling-with-Invertible-Neural-Networks | d61471dd28b42b4320de64df1515719781211aef | ["MIT"] | null | null | null | code/training.py | MFRIbrahim/Heart-Arrhythmia-Modelling-with-Invertible-Neural-Networks | d61471dd28b42b4320de64df1515719781211aef | ["MIT"] | null | null | null |
import torch
import numpy as np
import datagen
import config
import model as Model
import random
import os
import json
from multiprocessing import Pool
from pathlib import Path
from mavb.forward import simulate_type_1
from mavb.forward import simulate_type_2a
from mavb.forward import simulate_type_2b
from mavb.forward import simulate_type_2c
from mavb.forward import simulate_type_3
import warnings
warnings.filterwarnings('ignore')
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
networks_folder = Path("models/")
constants_folder = Path("constants/")
def append_new_line(file_name, text_to_append):
"""Append given text as a new line at the end of file"""
# Open the file in append & read mode ('a+')
with open(file_name, "a+") as file_object:
# Move read cursor to the start of file.
file_object.seek(0)
# If file is not empty then append '\n'
data = file_object.read(100)
if len(data) > 0:
file_object.write("\n")
# Append text at the end of file
file_object.write(text_to_append)
def find_nearest(array, value):
array = np.array(array)
idx = np.argmin((np.abs(array - value)), axis=0)
return idx
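# Example: find_nearest([0.2, 0.8, 1.3], 1) == 1, since |0.8 - 1| is the
# smallest distance. For 2-D input the argmin runs along axis 0, returning one
# index per column; this is used below to decode near-one-hot network outputs.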
def train_splitter(model, optim):
model.train()
l_tot = 0
x, y = datagen.generate_cINN_splitter3(
config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
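# Note on the loss (applies to all train_* routines in this file): for
# z = f(x; y) with log-Jacobian log|det J|, the quantity
#     0.5 * ||z||^2 - log|det J|
# is the conditional-INN negative log-likelihood -log p(x|y) under a
# standard-normal latent prior, up to an additive constant. A minimal
# illustrative helper (added sketch, not called elsewhere in this module):
def cinn_nll(z, log_jac):
    """Batch-mean cINN negative log-likelihood, constant terms dropped."""
    return torch.mean(0.5 * torch.sum(z**2, dim=1) - log_jac)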
def train_signals_recurrent_matching(model, optim):
model.train()
l_tot = 0
x, y = datagen.get_signals_recurrent_matching_batch(
config.batch_size * config.n_iterations)
for i in range(len(x)):
if x[i].shape[0] >= config.batch_size:
beg = 0
end = config.batch_size
for j in range(len(x[i])//config.batch_size):
optim.zero_grad()
x_i = x[i][beg:end].to(device)
y_i = y[i][beg:end].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (config.batch_size / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
beg += config.batch_size
end += config.batch_size
if (len(x[i])//config.batch_size) * config.batch_size != len(x[i]):
optim.zero_grad()
x_i = x[i][beg:].to(device)
y_i = y[i][beg:].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += ((len(x[i]) - beg) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
else:
optim.zero_grad()
x_i = x[i].to(device)
y_i = y[i].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (len(x[i]) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
return l_tot
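# Weighting note: each length bucket x[i] is consumed as full batches plus an
# optional remainder batch, and every batch-mean loss is scaled by
# n_batch / (config.batch_size * config.n_iterations), so the returned l_tot
# is the epoch's average per-sample loss regardless of bucket sizes.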
def train_signals_recurrent(model, optim):
model.train()
l_tot = 0
x, y = datagen.get_signals_recurrent_batch(
config.batch_size * config.n_iterations)
for i in range(len(x)):
if x[i].shape[0] >= config.batch_size:
beg = 0
end = config.batch_size
for j in range(len(x[i])//config.batch_size):
optim.zero_grad()
x_i = x[i][beg:end].to(device)
y_i = y[i][beg:end].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (config.batch_size / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
beg += config.batch_size
end += config.batch_size
if (len(x[i])//config.batch_size) * config.batch_size != len(x[i]):
optim.zero_grad()
x_i = x[i][beg:].to(device)
y_i = y[i][beg:].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += ((len(x[i]) - beg) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
else:
optim.zero_grad()
x_i = x[i].to(device)
y_i = y[i].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (len(x[i]) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
return l_tot
def train_signals(model, optim):
model.train()
l_tot = 0
x, y = datagen.get_signals_batch(config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
def train_signals_matching(model, optim):
model.train()
l_tot = 0
x, y = datagen.get_signals_matching_batch(
config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
def train_signals_sequence(model, optim):
model.train()
l_tot = 0
x, y = datagen.get_signals_sequence_batch(
config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
def train_matching_old(model, optim):
model.train()
l_tot = 0
x, y = datagen.generate_matching_batch_old(
config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
def train_seq_old(model, optim):
model.train()
l_tot = 0
x, y = datagen.generate_seq_batch_old(
config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
def train_cINN_old(model, optim):
model.train()
l_tot = 0
x, y = datagen.generate_cINN_batch_old(
config.batch_size * config.n_iterations)
for i in range(0, len(x), config.batch_size):
x_i = x[i:i+config.batch_size].to(device)
y_i = y[i:i+config.batch_size].to(device)
optim.zero_grad()
z_i = model(x_i, c=y_i)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(0.5 * torch.sum(z_i**2, dim=1) - log_jac)
l_tot += loss.data.item()
loss.backward()
optim.step()
return l_tot/config.n_iterations
def train_rcINN_matching_old(model, optim):
model.train()
l_tot = 0
x, y = datagen.generate_rcINN_matching_batch_old(
config.batch_size * config.n_iterations)
for i in range(len(x)):
if x[i].shape[0] >= config.batch_size:
beg = 0
end = config.batch_size
for j in range(len(x[i])//config.batch_size):
optim.zero_grad()
x_i = x[i][beg:end].to(device)
y_i = y[i][beg:end].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (config.batch_size / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
beg += config.batch_size
end += config.batch_size
if (len(x[i])//config.batch_size) * config.batch_size != len(x[i]):
optim.zero_grad()
x_i = x[i][beg:].to(device)
y_i = y[i][beg:].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += ((len(x[i]) - beg) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
else:
optim.zero_grad()
x_i = x[i].to(device)
y_i = y[i].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (len(x[i]) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
return l_tot
def train_rcINN_old(model, optim):
model.train()
l_tot = 0
x, y = datagen.generate_rcINN_batch_old(
config.batch_size * config.n_iterations)
for i in range(len(x)):
if x[i].shape[0] >= config.batch_size:
beg = 0
end = config.batch_size
for j in range(len(x[i])//config.batch_size):
optim.zero_grad()
x_i = x[i][beg:end].to(device)
y_i = y[i][beg:end].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (config.batch_size / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
beg += config.batch_size
end += config.batch_size
if (len(x[i])//config.batch_size) * config.batch_size != len(x[i]):
optim.zero_grad()
x_i = x[i][beg:].to(device)
y_i = y[i][beg:].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += ((len(x[i]) - beg) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
else:
optim.zero_grad()
x_i = x[i].to(device)
y_i = y[i].to(device)
z_i = model(x_i, c=y_i, recurrent=True)
log_jac = model.log_jacobian(run_forward=False)
loss = torch.mean(
torch.sum(0.5 * torch.sum(z_i**2, dim=1) - log_jac, dim=1))
l_tot += (len(x[i]) / (config.batch_size *
config.n_iterations)) * loss.data.item()
loss.backward()
optim.step()
return l_tot
def print_stats(filtered_bp, true_stats, name, signals=False, splitter=False):
y_true = true_stats[1]
n_Rwaves = true_stats[2]
bp = []
for i in range(len(filtered_bp)):
for j in range(len(filtered_bp[i][0])):
if signals:
bp.append([[filtered_bp[i][0][j]], [
filtered_bp[i][1][j]], filtered_bp[i][2], filtered_bp[i][3]])
else:
bp.append([[filtered_bp[i][0][j]], [
filtered_bp[i][1][j]], filtered_bp[i][2]])
intervals = []
if not splitter:
for i in range(len(bp)):
itv_sub = []
for j in range(len(bp[i][0])):
if bp[i][1][j] == "1":
itv = simulate_type_1(
bp[i][0][j][1], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "2a":
itv = simulate_type_2a(
bp[i][0][j][1], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "2b":
itv = simulate_type_2b(
bp[i][0][j][1], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "2c":
itv = simulate_type_2c(
bp[i][0][j][0], bp[i][0][j][1], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "3":
itv = simulate_type_3(
bp[i][0][j][0], bp[i][0][j][1], bp[i][0][j][2], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
intervals.append(itv_sub)
if splitter:
for i in range(len(bp)):
itv_sub = []
for j in range(len(bp[i][0])):
if bp[i][1][j] == "1":
itv = simulate_type_1(
bp[i][0][j][0], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "2a":
itv = simulate_type_2a(
bp[i][0][j][0], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "2b":
itv = simulate_type_2b(
bp[i][0][j][0], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "2c":
itv = simulate_type_2c(
bp[i][0][j][0], bp[i][0][j][1], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if bp[i][1][j] == "3":
itv = simulate_type_3(
bp[i][0][j][0], bp[i][0][j][1], bp[i][0][j][2], bp[i][2][0], bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
intervals.append(itv_sub)
differences = []
indeces = []
for i in range(len(intervals)):
for j in range(len(intervals[i])):
differences.append(
np.mean(np.abs(intervals[i][j]-y_true[:(n_Rwaves-1)])))
indeces.append([i, j])
differences = np.array(differences)
indeces = np.array(indeces)
idx = np.argsort(differences)
differences = differences[idx]
indeces = indeces[idx]
for i in range(len(differences[:10])):
append_new_line(
f'top10_sol_{name}.txt', "--------------------------")
append_new_line(f'top10_sol_{name}.txt', str(
intervals[indeces[i][0]][indeces[i][1]]))
append_new_line(
f'top10_sol_{name}.txt', str(differences[i]))
append_new_line(f'top10_sol_{name}.txt', str(
bp[indeces[i][0]][0][indeces[i][1]]))
append_new_line(f'top10_sol_{name}.txt', str(
bp[indeces[i][0]][1][indeces[i][1]]))
append_new_line(f'top10_sol_{name}.txt', str(
bp[indeces[i][0]][2]))
if signals:
append_new_line(f'top10_sol_{name}.txt', str(
bp[indeces[i][0]][3]))
append_new_line(
f'top10_sol_{name}.txt', "--------------------------")
append_new_line(
f'top10_sol_{name}.txt', "================================================")
append_new_line(
f'top10_sol_{name}.txt', str(y_true))
append_new_line(
f'top10_sol_{name}.txt', "================================================")
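# print_stats simulates RR intervals for every candidate block pattern with the
# matching mavb.forward routine (types 1, 2a, 2b, 2c, 3), ranks candidates by
# mean absolute deviation from the observed intervals, and appends the ten best
# solutions (plus the ground-truth intervals) to top10_sol_<name>.txt.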
def make_stats(filtered_bp, true_stats, name, signals=False, splitter=False):
y_true = true_stats[1]
n_Rwaves = true_stats[2]
atrial_cycle_length = true_stats[3]
conduction_constant = true_stats[4]
block_pattern_true = true_stats[5]
block_type = true_stats[6]
counter1 = 0
counter2a = 0
counter2b = 0
counter2c = 0
counter3 = 0
same_bp = []
alt_bp = []
for i in range(len(filtered_bp)):
for j in range(len(filtered_bp[i][0])):
if filtered_bp[i][1][j] == block_type:
if signals:
same_bp.append([[filtered_bp[i][0][j]], [
filtered_bp[i][1][j]], filtered_bp[i][2], filtered_bp[i][3]])
else:
same_bp.append([[filtered_bp[i][0][j]], [
filtered_bp[i][1][j]], filtered_bp[i][2]])
else:
if signals:
alt_bp.append([[filtered_bp[i][0][j]], [
filtered_bp[i][1][j]], filtered_bp[i][2], filtered_bp[i][3]])
else:
alt_bp.append([[filtered_bp[i][0][j]], [
filtered_bp[i][1][j]], filtered_bp[i][2]])
if filtered_bp[i][1][j] == "1":
counter1 += 1
if filtered_bp[i][1][j] == "2a":
counter2a += 1
if filtered_bp[i][1][j] == "2b":
counter2b += 1
if filtered_bp[i][1][j] == "2c":
counter2c += 1
if filtered_bp[i][1][j] == "3":
counter3 += 1
ratio1 = 0
ratio2a = 0
ratio2b = 0
ratio2c = 0
ratio3 = 0
if len(alt_bp) > 0:
ratio1 = counter1 / len(alt_bp)
ratio2a = counter2a / len(alt_bp)
ratio2b = counter2b / len(alt_bp)
ratio2c = counter2c / len(alt_bp)
ratio3 = counter3 / len(alt_bp)
intervals_same = []
intervals_alt = []
if not splitter:
for i in range(len(same_bp)):
itv_sub = []
for j in range(len(same_bp[i][0])):
if same_bp[i][1][j] == "1":
itv = simulate_type_1(
same_bp[i][0][j][1], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "2a":
itv = simulate_type_2a(
same_bp[i][0][j][1], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "2b":
itv = simulate_type_2b(
same_bp[i][0][j][1], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "2c":
itv = simulate_type_2c(
same_bp[i][0][j][0], same_bp[i][0][j][1], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "3":
itv = simulate_type_3(
same_bp[i][0][j][0], same_bp[i][0][j][1], same_bp[i][0][j][2], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
intervals_same.append(itv_sub)
for i in range(len(alt_bp)):
itv_sub = []
for j in range(len(alt_bp[i][0])):
if alt_bp[i][1][j] == "1":
itv = simulate_type_1(
alt_bp[i][0][j][1], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "2a":
itv = simulate_type_2a(
alt_bp[i][0][j][1], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "2b":
itv = simulate_type_2b(
alt_bp[i][0][j][1], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "2c":
itv = simulate_type_2c(
alt_bp[i][0][j][0], alt_bp[i][0][j][1], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "3":
itv = simulate_type_3(
alt_bp[i][0][j][0], alt_bp[i][0][j][1], alt_bp[i][0][j][2], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
intervals_alt.append(itv_sub)
if splitter:
for i in range(len(same_bp)):
itv_sub = []
for j in range(len(same_bp[i][0])):
if same_bp[i][1][j] == "1":
itv = simulate_type_1(
same_bp[i][0][j][0], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "2a":
itv = simulate_type_2a(
same_bp[i][0][j][0], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "2b":
itv = simulate_type_2b(
same_bp[i][0][j][0], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "2c":
itv = simulate_type_2c(
same_bp[i][0][j][0], same_bp[i][0][j][1], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if same_bp[i][1][j] == "3":
itv = simulate_type_3(
same_bp[i][0][j][0], same_bp[i][0][j][1], same_bp[i][0][j][2], same_bp[i][2][0], same_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
intervals_same.append(itv_sub)
for i in range(len(alt_bp)):
itv_sub = []
for j in range(len(alt_bp[i][0])):
if alt_bp[i][1][j] == "1":
itv = simulate_type_1(
alt_bp[i][0][j][0], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "2a":
itv = simulate_type_2a(
alt_bp[i][0][j][0], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "2b":
itv = simulate_type_2b(
alt_bp[i][0][j][0], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "2c":
itv = simulate_type_2c(
alt_bp[i][0][j][0], alt_bp[i][0][j][1], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
if alt_bp[i][1][j] == "3":
itv = simulate_type_3(
alt_bp[i][0][j][0], alt_bp[i][0][j][1], alt_bp[i][0][j][2], alt_bp[i][2][0], alt_bp[i][2][1])
itv_sub.append(itv[:(n_Rwaves-1)])
intervals_alt.append(itv_sub)
differences_same = []
indeces_same = []
differences_alt = []
indeces_alt = []
for i in range(len(intervals_same)):
for j in range(len(intervals_same[i])):
differences_same.append(
np.mean(np.abs(intervals_same[i][j]-y_true[:(n_Rwaves-1)])))
indeces_same.append([i, j])
for i in range(len(intervals_alt)):
for j in range(len(intervals_alt[i])):
differences_alt.append(
np.mean(np.abs(intervals_alt[i][j]-y_true[:(n_Rwaves-1)])))
indeces_alt.append([i, j])
differences_same = np.array(differences_same)
indeces_same = np.array(indeces_same)
differences_alt = np.array(differences_alt)
indeces_alt = np.array(indeces_alt)
idx_same = np.argsort(differences_same)
differences_same = differences_same[idx_same]
indeces_same = indeces_same[idx_same]
idx_alt = np.argsort(differences_alt)
differences_alt = differences_alt[idx_alt]
indeces_alt = indeces_alt[idx_alt]
if len(differences_alt) + len(differences_same) > 0:
alt_same_ratio = len(differences_alt) / \
(len(differences_alt) + len(differences_same))
append_new_line(f'TEST_alt_same_ratio_{name}.txt', str(alt_same_ratio))
append_new_line(f'TEST_block_type_ratios_{name}.txt', str(ratio1))
append_new_line(f'TEST_block_type_ratios_{name}.txt', str(ratio2a))
append_new_line(f'TEST_block_type_ratios_{name}.txt', str(ratio2b))
append_new_line(f'TEST_block_type_ratios_{name}.txt', str(ratio2c))
append_new_line(f'TEST_block_type_ratios_{name}.txt', str(ratio3))
if len(differences_same) != 0:
append_new_line(
f'TEST_top1_diff_same_{name}.txt', str(differences_same[0]))
if len(differences_alt) != 0:
append_new_line(
f'TEST_top1_diff_alt_{name}.txt', str(differences_alt[0]))
if alt_bp[indeces_alt[0][0]][1][indeces_alt[0][1]] == "1":
append_new_line(f'TEST_top_alt_block_type_{name}.txt', str(0))
if alt_bp[indeces_alt[0][0]][1][indeces_alt[0][1]] == "2a":
append_new_line(f'TEST_top_alt_block_type_{name}.txt', str(1))
if alt_bp[indeces_alt[0][0]][1][indeces_alt[0][1]] == "2b":
append_new_line(f'TEST_top_alt_block_type_{name}.txt', str(2))
if alt_bp[indeces_alt[0][0]][1][indeces_alt[0][1]] == "2c":
append_new_line(f'TEST_top_alt_block_type_{name}.txt', str(3))
if alt_bp[indeces_alt[0][0]][1][indeces_alt[0][1]] == "3":
append_new_line(f'TEST_top_alt_block_type_{name}.txt', str(4))
for i in range(len(differences_same[:5])):
append_new_line(
f'TEST_top10_sol_same_{name}.txt', "--------------------------")
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(
intervals_same[indeces_same[i][0]][indeces_same[i][1]]))
append_new_line(
f'TEST_top10_sol_same_{name}.txt', str(differences_same[i]))
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(
same_bp[indeces_same[i][0]][0][indeces_same[i][1]]))
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(
same_bp[indeces_same[i][0]][1][indeces_same[i][1]]))
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(
same_bp[indeces_same[i][0]][2]))
if signals:
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(
same_bp[indeces_same[i][0]][3]))
append_new_line(
f'TEST_top10_sol_same_{name}.txt', "--------------------------")
for i in range(len(differences_alt[:5])):
append_new_line(
f'TEST_top10_sol_alt_{name}.txt', "--------------------------")
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(
intervals_alt[indeces_alt[i][0]][indeces_alt[i][1]]))
append_new_line(
f'TEST_top10_sol_alt_{name}.txt', str(differences_alt[i]))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(
alt_bp[indeces_alt[i][0]][0][indeces_alt[i][1]]))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(
alt_bp[indeces_alt[i][0]][1][indeces_alt[i][1]]))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(
alt_bp[indeces_alt[i][0]][2]))
if signals:
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(
alt_bp[indeces_alt[i][0]][3]))
append_new_line(
f'TEST_top10_sol_alt_{name}.txt', "--------------------------")
if signals:
block_pattern_s = datagen.correct_bp(
block_pattern_true, block_type, n_Rwaves)
signals_true = np.array(datagen.bp_to_signals(
block_pattern_s, block_type, n_Rwaves, fill=False), dtype='int16')
append_new_line(f'TEST_top10_sol_same_{name}.txt',
"=====================================================")
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(
y_true[:(n_Rwaves-1)]))
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(n_Rwaves))
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(block_type))
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(block_pattern_true))
append_new_line(
f'TEST_top10_sol_same_{name}.txt', str(atrial_cycle_length))
append_new_line(
f'TEST_top10_sol_same_{name}.txt', str(conduction_constant))
if signals:
append_new_line(f'TEST_top10_sol_same_{name}.txt', str(signals_true))
append_new_line(f'TEST_top10_sol_same_{name}.txt',
"=====================================================")
append_new_line(f'TEST_top10_sol_alt_{name}.txt',
"=====================================================")
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(
y_true[:(n_Rwaves-1)]))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(n_Rwaves))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(block_type))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(block_pattern_true))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(atrial_cycle_length))
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(conduction_constant))
if signals:
append_new_line(f'TEST_top10_sol_alt_{name}.txt', str(signals_true))
append_new_line(f'TEST_top10_sol_alt_{name}.txt',
"=====================================================")
def process_matching(data):
output_matching = data[0]
n_Rwaves = data[1]
sequence_number = data[2]
matchings = []
for i in range(len(output_matching)):
matching_re = output_matching[i].reshape((24, 8))
sub = []
for j in range(len(matching_re)):
nearest = find_nearest(matching_re[j], 1)
if np.abs(matching_re[j][nearest] - 1) < 0.5:
sub.append(nearest + 2)
else:
sub.append(0)
if (len(np.where(np.array(sub[n_Rwaves - 1:]) == 0)[0]) != len(sub[n_Rwaves - 1:]) or
len(np.where(np.array(sub[:n_Rwaves - 1]) == 0)[0]) != 0):
continue
matchings.append(sub[:n_Rwaves - 1])
matchings_converted = []
for i in range(len(matchings)):
counter = 0
sub_array = [0]
for j in range(len(matchings[i])):
counter += matchings[i][j] - 1
sub_array.append(counter)
matchings_converted.append(sub_array)
return (matchings_converted, sequence_number)
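# process_matching decodes each 24x8 output block row by row: the column
# closest to 1 (within 0.5) yields a per-step count (offset +2); rows from
# index n_Rwaves - 1 onward must decode to 0 and earlier rows must not.
# The second loop accumulates those counts into a cumulative matching, e.g.
# per-step counts [2, 3, 2] become [0, 1, 3, 4].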
def get_outputs_signals_recurrent_matching(true_stats, mp=True):
with torch.no_grad():
aa_mean = np.loadtxt(constants_folder / "aa_mean_est.csv")
aa_std = np.loadtxt(constants_folder / "aa_std_est.csv")
cc_mean = np.loadtxt(constants_folder / "cc_mean_est.csv")
cc_std = np.loadtxt(constants_folder / "cc_std_est.csv")
y_mean = np.loadtxt(constants_folder / "y_mean_est.csv")
y_std = np.loadtxt(constants_folder / "y_std_est.csv")
cond_mean = np.loadtxt(constants_folder / "cond_signals_mean_est.csv")
cond_std = np.loadtxt(constants_folder / "cond_signals_std_est.csv")
config.n_x_features = 5
config.n_cond_features = 2
config.rnn_layers = 2
config.hidden_size = 32
model_rcINN, optim_rcINN, weight_scheduler_rcINN = Model.generate_rcINN_old()
config.n_x_features = 194
config.n_cond_features = 24
config.n_hidden_layer_size = 512
model_seq, optim_seq, weight_scheduler_seq = Model.generate_cINN_old()
config.n_x_features = 192
model_matching, optim_matching, weight_scheduler_matching = Model.generate_cINN_old()
Model.load(networks_folder / "model_signals_rcINN_matching.pth",
optim_rcINN, model_rcINN)
Model.load(networks_folder / "model_signals_sequence.pth",
optim_seq, model_seq)
Model.load(networks_folder / "model_signals_matching.pth",
optim_matching, model_matching)
model_rcINN.eval()
model_seq.eval()
model_matching.eval()
seq_len_total = []
for stat in true_stats:
y_seq = np.stack([(stat[0] - y_mean) / y_std]*1000, axis=0)
z_seq = np.random.randn(1000, 194)
y_seq = torch.tensor(y_seq, dtype=torch.float32)
z_seq = torch.tensor(z_seq, dtype=torch.float32)
y_seq = y_seq.to(device)
z_seq = z_seq.to(device)
output_seq = model_seq(z_seq, c=y_seq, rev=True)
output_seq = output_seq.cpu().detach()
seq_lengths = []
for i in range(len(output_seq)):
seq_lengths.append(find_nearest(output_seq[i], 1) + 6)
seq_len_total.append(seq_lengths)
final_lengths = []
for lens in seq_len_total:
occurence = []
len_track = []
for len_i in lens:
if len_i not in len_track:
len_track.append(len_i)
counter = 0
for len_j in lens:
if len_j == len_i:
counter += 1
occurence.append(counter)
occurence = np.array(occurence)
len_track = np.array(len_track)
idx_occ = np.argsort(occurence)[::-1]
len_track = len_track[idx_occ]
final_lengths.append(len_track[:10])
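        # Frequency ranking: the loop above counts how often each proposed
        # sequence length occurs among the 1000 reverse-sampled draws and keeps
        # the ten most common; collections.Counter(lens).most_common(10) would
        # be an equivalent, more idiomatic formulation.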
match_totals = []
for k in range(len(true_stats)):
y_matching = np.stack(
[(true_stats[k][0] - y_mean) / y_std]*5000, axis=0)
z_matching = np.random.randn(5000, 192)
y_matching = torch.tensor(y_matching, dtype=torch.float32)
z_matching = torch.tensor(z_matching, dtype=torch.float32)
y_matching = y_matching.to(device)
z_matching = z_matching.to(device)
output_matching = model_matching(
z_matching, c=y_matching, rev=True)
output_matching = output_matching.cpu().detach()
match_totals.append((output_matching, true_stats[k][2], k))
if mp:
with Pool(os.cpu_count() - 1) as pool:
processed_matches = pool.map(process_matching, match_totals)
processed_matches = sorted(processed_matches, key=lambda x: x[1])
else:
processed_matches = []
processed_matches.append(process_matching(match_totals[0]))
final_matches = []
for matches in processed_matches:
occurence = []
match_track = []
for match_i in matches[0]:
if match_i not in match_track:
match_track.append(match_i)
counter = 0
for match_j in matches[0]:
if match_j == match_i:
counter += 1
occurence.append(counter)
occurence = np.array(occurence)
match_track = np.array(match_track)
idx_occ = np.argsort(occurence)[::-1]
match_track = match_track[idx_occ]
final_matches.append(match_track)
paired_totals = []
for k in range(len(final_lengths)):
paired_matchings = []
for i in range(len(final_lengths[k])):
sub_match = []
for j in range(len(final_matches[k])):
if (final_matches[k][j][-1] == final_lengths[k][i] - 1 or
final_matches[k][j][-1] + 1 == final_lengths[k][i] - 1 or
final_matches[k][j][-1] + 2 == final_lengths[k][i] - 1 or
final_matches[k][j][-1] + 3 == final_lengths[k][i] - 1 or
final_matches[k][j][-1] + 4 == final_lengths[k][i] - 1 or
final_matches[k][j][-1] + 5 == final_lengths[k][i] - 1 or
final_matches[k][j][-1] + 6 == final_lengths[k][i] - 1):
sub_match.append(final_matches[k][j])
paired_matchings.append((final_lengths[k][i], sub_match))
paired_totals.append(paired_matchings)
outputs = []
for k in range(len(paired_totals)):
y_rcINN = []
z_rcINN = []
for i in range(len(paired_totals[k])):
if len(paired_totals[k][i][1]) == 0:
continue
z_stack = []
cond_stack = []
counter = 0
for j in range(len(paired_totals[k][i][1])):
z_stack.append(np.random.randn(
50, 5, paired_totals[k][i][0]))
cond_stack.append(np.stack([datagen.y_to_cond(
paired_totals[k][i][1][j], paired_totals[k][i][0], true_stats[k][0])]*50, axis=0))
counter += 1
if counter == 10:
break
z_stack = np.concatenate(z_stack, axis=0)
cond_stack = np.concatenate(cond_stack, axis=0)
y_rcINN.append(cond_stack)
z_rcINN.append(z_stack)
for i in range(len(y_rcINN)):
y_rcINN[i] = torch.tensor(y_rcINN[i], dtype=torch.float32)
y_rcINN[i][:, 0, :] = (
y_rcINN[i][:, 0, :] - cond_mean[0]) / cond_std[0]
y_rcINN[i][:, 1, :] = (
y_rcINN[i][:, 1, :] - cond_mean[1]) / cond_std[1]
z_rcINN[i] = torch.tensor(z_rcINN[i], dtype=torch.float32)
big_output = []
for i in range(len(y_rcINN)):
y_rcINN[i] = y_rcINN[i].to(device)
z_rcINN[i] = z_rcINN[i].to(device)
output_rcINN = model_rcINN(
z_rcINN[i], c=y_rcINN[i], rev=True, recurrent=True)
output_rcINN = output_rcINN.cpu().detach()
output_rcINN[:, -2, :] = output_rcINN[:, -2, :] * \
aa_std + aa_mean
output_rcINN[:, -1, :] = output_rcINN[:, -1, :] * \
cc_std + cc_mean
big_output.append(output_rcINN)
outputs.append(big_output)
return outputs
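# Sampling here is three-stage: (1) the sequence network proposes plausible
# signal lengths, (2) the matching network proposes R-wave matchings, (3) each
# compatible (length, matching) pair conditions the recurrent cINN via
# datagen.y_to_cond, and the sampled aa/cc channels are de-standardized back
# to physical units.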
def process_outputs_signals_recurrent_matching(big_output, true_stats):
n_Rwaves = true_stats[2]
filtered_bp = []
for i in range(len(big_output)):
for j in range(len(big_output[i])):
output_ij = np.array(big_output[i][j])
aa_ij = float(np.mean(output_ij[-2]))
cc_ij = float(np.mean(output_ij[-1]))
if aa_ij < 188 or aa_ij > 400 or cc_ij < 1 or cc_ij > aa_ij:
continue
signals = output_ij[:3, :]
for k in range(signals.shape[0]):
for l in range(signals.shape[1]):
dist0 = abs(signals[k][l])
dist1 = abs(signals[k][l] - 1)
if dist0 < dist1:
signals[k][l] = 0
if dist0 > dist1:
signals[k][l] = 1
if dist0 == dist1:
signals[k][l] = random.choice([0, 1])
lvl1 = signals[0]
lvl2 = signals[1]
lvl3 = signals[2]
idx_1 = np.where(np.array(lvl1) == 1)[0]
idx_2 = np.where(np.array(lvl2) == 1)[0]
idx_3 = np.where(np.array(lvl3) == 1)[0]
relevant_1 = np.array(lvl1)[:idx_1[-1]+1]
relevant_2 = np.array(lvl2)[idx_1]
relevant_3 = np.array(lvl3)[idx_2]
id0_1 = np.where(np.array(relevant_1) == 0)[0]
id0_2 = np.where(np.array(relevant_2) == 0)[0]
id0_3 = np.where(np.array(relevant_3) == 0)[0]
diff1 = abs(id0_1[:-1] - id0_1[1:])
diff2 = abs(id0_2[:-1] - id0_2[1:])
diff3 = abs(id0_3[:-1] - id0_3[1:])
if len(id0_1) > 1:
if min(diff1) <= 1:
continue
if len(id0_2) > 1:
if min(diff2) <= 1:
continue
if len(id0_3) > 1:
if min(diff3) <= 1:
continue
if idx_2[-1] > idx_1[-1]:
continue
if idx_3[-1] > idx_2[-1]:
continue
res = datagen.signals_to_bp([lvl1, lvl2, lvl3], n_Rwaves)
for res_i in res:
if len(res_i) != 0:
filtered_bp.append([res_i[0], res_i[1], [float(aa_ij), float(
cc_ij)], np.array([lvl1, lvl2, lvl3], dtype='int16')])
return (filtered_bp, true_stats)
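# Decoding note: each signal entry is snapped to the nearer of {0, 1}, with
# exact ties broken uniformly at random. Candidates survive only if the mean
# atrial cycle length aa falls in [188, 400] and the conduction constant cc in
# [1, aa], and if the three level signals pass the structural checks (no
# adjacent zeros within a level, activity nested from level 1 downward).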
def get_outputs_signals_recurrent(true_stats):
with torch.no_grad():
aa_mean = np.loadtxt(constants_folder / "aa_mean_est.csv")
aa_std = np.loadtxt(constants_folder / "aa_std_est.csv")
cc_mean = np.loadtxt(constants_folder / "cc_mean_est.csv")
cc_std = np.loadtxt(constants_folder / "cc_std_est.csv")
y_mean = np.loadtxt(constants_folder / "y_mean_est.csv")
y_std = np.loadtxt(constants_folder / "y_std_est.csv")
config.n_x_features = 5
config.n_cond_features = 24
config.rnn_layers = 2
config.hidden_size = 64
model_rcINN, optim_rcINN, weight_scheduler_rcINN = Model.generate_rcINN_old()
config.n_x_features = 194
config.n_hidden_layer_size = 512
model_seq, optim_seq, weight_scheduler_seq = Model.generate_cINN_old()
Model.load(networks_folder / "model_signals_rcINN.pth",
optim_rcINN, model_rcINN)
Model.load(networks_folder / "model_signals_sequence.pth",
optim_seq, model_seq)
model_rcINN.eval()
model_seq.eval()
seq_len_total = []
for stat in true_stats:
y_seq = np.stack([(stat[0] - y_mean) / y_std]*1000, axis=0)
z_seq = np.random.randn(1000, 194)
y_seq = torch.tensor(y_seq, dtype=torch.float32)
z_seq = torch.tensor(z_seq, dtype=torch.float32)
y_seq = y_seq.to(device)
z_seq = z_seq.to(device)
output_seq = model_seq(z_seq, c=y_seq, rev=True)
output_seq = output_seq.cpu().detach()
seq_lengths = []
for i in range(len(output_seq)):
seq_lengths.append(find_nearest(output_seq[i], 1) + 6)
seq_len_total.append(seq_lengths)
final_lengths = []
for lens in seq_len_total:
occurence = []
len_track = []
for len_i in lens:
if len_i not in len_track:
len_track.append(len_i)
counter = 0
for len_j in lens:
if len_j == len_i:
counter += 1
occurence.append(counter)
occurence = np.array(occurence)
len_track = np.array(len_track)
idx_occ = np.argsort(occurence)[::-1]
len_track = len_track[idx_occ]
final_lengths.append(len_track[:10])
outputs = []
for j in range(len(final_lengths)):
big_output = []
for seq_len in final_lengths[j]:
y_array = np.stack(
[(true_stats[j][0] - y_mean) / y_std]*seq_len, axis=1)
y_array = np.stack([y_array]*500, axis=0)
z_array = np.random.randn(500, 5, seq_len)
y_array = torch.tensor(y_array, dtype=torch.float32)
z_array = torch.tensor(z_array, dtype=torch.float32)
y_array = y_array.to(device)
z_array = z_array.to(device)
output = model_rcINN(z_array, c=y_array,
rev=True, recurrent=True)
output = output.cpu().detach()
output[:, -2, :] = output[:, -2, :] * aa_std + aa_mean
output[:, -1, :] = output[:, -1, :] * cc_std + cc_mean
big_output.append(output)
outputs.append(big_output)
return outputs
def process_outputs_signals_recurrent(big_output, true_stats):
n_Rwaves = true_stats[2]
filtered_bp = []
for i in range(len(big_output)):
for j in range(len(big_output[i])):
output_ij = np.array(big_output[i][j])
aa_ij = float(np.mean(output_ij[-2]))
cc_ij = float(np.mean(output_ij[-1]))
if aa_ij < 188 or aa_ij > 400 or cc_ij < 1 or cc_ij > aa_ij:
continue
signals = output_ij[:3, :]
for k in range(signals.shape[0]):
for l in range(signals.shape[1]):
dist0 = abs(signals[k][l])
dist1 = abs(signals[k][l] - 1)
if dist0 < dist1:
signals[k][l] = 0
if dist0 > dist1:
signals[k][l] = 1
if dist0 == dist1:
signals[k][l] = random.choice([0, 1])
lvl1 = signals[0]
lvl2 = signals[1]
lvl3 = signals[2]
idx_1 = np.where(np.array(lvl1) == 1)[0]
idx_2 = np.where(np.array(lvl2) == 1)[0]
idx_3 = np.where(np.array(lvl3) == 1)[0]
relevant_1 = np.array(lvl1)[:idx_1[-1]+1]
relevant_2 = np.array(lvl2)[idx_1]
relevant_3 = np.array(lvl3)[idx_2]
id0_1 = np.where(np.array(relevant_1) == 0)[0]
id0_2 = np.where(np.array(relevant_2) == 0)[0]
id0_3 = np.where(np.array(relevant_3) == 0)[0]
diff1 = abs(id0_1[:-1] - id0_1[1:])
diff2 = abs(id0_2[:-1] - id0_2[1:])
diff3 = abs(id0_3[:-1] - id0_3[1:])
if len(id0_1) > 1:
if min(diff1) <= 1:
continue
if len(id0_2) > 1:
if min(diff2) <= 1:
continue
if len(id0_3) > 1:
if min(diff3) <= 1:
continue
if idx_2[-1] > idx_1[-1]:
continue
if idx_3[-1] > idx_2[-1]:
continue
res = datagen.signals_to_bp([lvl1, lvl2, lvl3], n_Rwaves)
for res_i in res:
if len(res_i) != 0:
filtered_bp.append([res_i[0], res_i[1], [float(aa_ij), float(
cc_ij)], np.array([lvl1, lvl2, lvl3], dtype='int16')])
return (filtered_bp, true_stats)
def get_outputs_signals(true_stats):
with torch.no_grad():
aa_mean = np.loadtxt(constants_folder / "aa_mean_est.csv")
aa_std = np.loadtxt(constants_folder / "aa_std_est.csv")
cc_mean = np.loadtxt(constants_folder / "cc_mean_est.csv")
cc_std = np.loadtxt(constants_folder / "cc_std_est.csv")
y_mean = np.loadtxt(constants_folder / "y_mean_est.csv")
y_std = np.loadtxt(constants_folder / "y_std_est.csv")
config.n_x_features = 602
config.n_cond_features = 24
config.n_hidden_layer_size = 1024
model, optim, weight_scheduler = Model.generate_cINN_old()
Model.load(networks_folder / "model_signals_cINN.pth", optim, model)
model.eval()
outputs = []
for stat in true_stats:
y_array = np.stack([(stat[0] - y_mean) / y_std]*5000, axis=0)
z_array = np.random.randn(5000, 602)
y_array = torch.tensor(y_array, dtype=torch.float32)
z_array = torch.tensor(z_array, dtype=torch.float32)
y_array = y_array.to(device)
z_array = z_array.to(device)
big_output = model(z_array, c=y_array, rev=True)
big_output = big_output.cpu().detach()
big_output[:, -2] = big_output[:, -2] * aa_std + aa_mean
big_output[:, -1] = big_output[:, -1] * cc_std + cc_mean
outputs.append(big_output)
return outputs
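# Inference runs the cINN in reverse (rev=True): with z ~ N(0, I) and the
# standardized observation as condition, each of the 5000 latent draws yields
# one candidate solution, whose last two entries are de-standardized to aa/cc.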
def process_outputs_signals(big_output, true_stats):
n_Rwaves = true_stats[2]
aa = np.array(big_output[:, -2])
cc = np.array(big_output[:, -1])
filtered_bp = []
for i in range(len(big_output)):
if aa[i] < 188 or aa[i] > 400 or cc[i] < 1 or cc[i] > aa[i]:
continue
signals = np.array(big_output[i][0:600])
for k in range(len(signals)):
dist0 = abs(signals[k])
dist1 = abs(signals[k] - 1)
if dist0 < dist1:
signals[k] = 0
if dist0 > dist1:
signals[k] = 1
if dist0 == dist1:
signals[k] = random.choice([0, 1])
lvl1 = signals[0:200]
lvl2 = signals[200:400]
lvl3 = signals[400:600]
idx_1 = np.where(np.array(lvl1) == 1)[0]
idx_2 = np.where(np.array(lvl2) == 1)[0]
idx_3 = np.where(np.array(lvl3) == 1)[0]
relevant_1 = np.array(lvl1)[:idx_1[-1]+1]
relevant_2 = np.array(lvl2)[idx_1]
relevant_3 = np.array(lvl3)[idx_2]
id0_1 = np.where(np.array(relevant_1) == 0)[0]
id0_2 = np.where(np.array(relevant_2) == 0)[0]
id0_3 = np.where(np.array(relevant_3) == 0)[0]
diff1 = abs(id0_1[:-1] - id0_1[1:])
diff2 = abs(id0_2[:-1] - id0_2[1:])
diff3 = abs(id0_3[:-1] - id0_3[1:])
if len(id0_1) > 1:
if min(diff1) <= 1:
continue
if len(id0_2) > 1:
if min(diff2) <= 1:
continue
if len(id0_3) > 1:
if min(diff3) <= 1:
continue
if idx_2[-1] > idx_1[-1]:
continue
if idx_3[-1] > idx_2[-1]:
continue
lvl1 = lvl1[:idx_1[-1]+1]
lvl2 = lvl2[:idx_1[-1]+1]
lvl3 = lvl3[:idx_1[-1]+1]
res = datagen.signals_to_bp([lvl1, lvl2, lvl3], n_Rwaves)
for res_i in res:
if len(res_i) != 0:
filtered_bp.append([res_i[0], res_i[1], [float(aa[i]), float(
cc[i])], np.array([lvl1, lvl2, lvl3], dtype='int16')])
return (filtered_bp, true_stats)
def process_outputs_splitter(data):
big_output = data[0]
n_Rwaves = data[1][2]
splitter_type = data[2]
sequence_number = data[3]
aa = np.array(big_output[:, -2])
cc = np.array(big_output[:, -1])
output_bp = []
output_constants = []
for i in range(len(big_output)):
if aa[i] < 188 or aa[i] > 400 or cc[i] < 1 or cc[i] > aa[i]:
continue
if splitter_type == "1" or splitter_type == "2a":
x_re = big_output[:, :-2][i].reshape((25, 7))
if splitter_type == "2b":
x_re = big_output[:, :-2][i].reshape((50, 7))
if splitter_type == "2c":
x_re = big_output[:, :-2][i].reshape((75, 2))
if splitter_type == "3":
x_re = big_output[:, :-2][i].reshape((175, 2))
output_bp_sub = []
lvl1 = []
lvl2 = []
lvl3 = []
if splitter_type == "1" or splitter_type == "2a":
for j in range(25):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl2.append(nearest + 1)
else:
lvl2.append(-1)
if splitter_type == "2b":
for j in range(50):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl2.append(nearest + 1)
else:
lvl2.append(-1)
if splitter_type == "2c":
for j in range(50):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl1.append(nearest + 1)
else:
lvl1.append(-1)
for j in range(50, 75):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl2.append(nearest)
else:
lvl2.append(-1)
if splitter_type == "3":
for j in range(100):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl1.append(nearest + 1)
else:
lvl1.append(-1)
for j in range(100, 150):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl2.append(nearest)
else:
lvl2.append(-1)
for j in range(150, 175):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl3.append(nearest)
else:
lvl3.append(-1)
lvl1_elist = np.where(np.array(lvl1) == -1)[0]
lvl2_elist = np.where(np.array(lvl2) == -1)[0]
lvl3_elist = np.where(np.array(lvl3) == -1)[0]
lvl1_end = len(lvl1)
lvl2_end = len(lvl2)
lvl3_end = len(lvl3)
if len(lvl1_elist) != 0:
lvl1_end = lvl1_elist[0]
if len(np.where(np.array(lvl1[lvl1_end:]) == -1)[0]) != len(lvl1[lvl1_end:]):
continue
if len(lvl2_elist) != 0:
lvl2_end = lvl2_elist[0]
if len(np.where(np.array(lvl2[lvl2_end:]) == -1)[0]) != len(lvl2[lvl2_end:]):
continue
if len(lvl3_elist) != 0:
lvl3_end = lvl3_elist[0]
if len(np.where(np.array(lvl3[lvl3_end:]) == -1)[0]) != len(lvl3[lvl3_end:]):
continue
output_bp_sub.append(lvl1[:lvl1_end])
output_bp_sub.append(lvl2[:lvl2_end])
output_bp_sub.append(lvl3[:lvl3_end])
output_bp.append(output_bp_sub)
output_constants.append([float(aa[i]), float(cc[i])])
filtered_bp = []
for i in range(len(output_bp)):
bp, bp_type = datagen.check_block_pattern_splitter(
output_bp[i], n_Rwaves, splitter_type)
if len(bp) != 0:
filtered_bp.append([bp, bp_type, output_constants[i]])
return (filtered_bp, data[1], sequence_number)
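# Each splitter type above reshapes the flat output (minus the trailing aa/cc
# pair) into one-hot-style blocks matching its alphabet: (25, 7) for types 1
# and 2a, (50, 7) for 2b, (75, 2) for 2c and (175, 2) for type 3; decoded
# levels end at the first unmatched row, which must be followed only by
# unmatched rows.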
def get_outputs_splitter(true_stats, splitter_type):
with torch.no_grad():
aa_mean = np.loadtxt(constants_folder /
f"aa_mean_splitter{splitter_type}_est.csv")
aa_std = np.loadtxt(constants_folder /
f"aa_std_splitter{splitter_type}_est.csv")
cc_mean = np.loadtxt(constants_folder /
f"cc_mean_splitter{splitter_type}_est.csv")
cc_std = np.loadtxt(constants_folder /
f"cc_std_splitter{splitter_type}_est.csv")
y_mean = np.loadtxt(constants_folder /
f"y_mean_splitter{splitter_type}_est.csv")
y_std = np.loadtxt(constants_folder /
f"y_std_splitter{splitter_type}_est.csv")
if splitter_type == "1" or splitter_type == "2a":
config.n_x_features = 177
config.n_hidden_layer_size = 512
if splitter_type == "2b" or splitter_type == "3":
config.n_x_features = 352
config.n_hidden_layer_size = 1024
if splitter_type == "2c":
config.n_x_features = 152
config.n_hidden_layer_size = 512
model, optim, weight_scheduler = Model.generate_cINN_old()
Model.load(networks_folder /
f"model_splitter{splitter_type}.pth", optim, model)
model.eval()
if splitter_type == "1" or splitter_type == "2a":
z_size = 177
if splitter_type == "2b" or splitter_type == "3":
z_size = 352
if splitter_type == "2c":
z_size = 152
outputs = []
for stat in true_stats:
y_array = np.stack([(stat[0] - y_mean) / y_std]*1000, axis=0)
z_array = np.random.randn(1000, z_size)
y_array = torch.tensor(y_array, dtype=torch.float32)
z_array = torch.tensor(z_array, dtype=torch.float32)
y_array = y_array.to(device)
z_array = z_array.to(device)
big_output = model(z_array, c=y_array, rev=True)
big_output = big_output.cpu().detach()
big_output[:, -2] = big_output[:, -2] * aa_std + aa_mean
big_output[:, -1] = big_output[:, -1] * cc_std + cc_mean
outputs.append(big_output)
return outputs
def get_outputs_bp(true_stats):
with torch.no_grad():
aa_mean = np.loadtxt(constants_folder / "aa_mean_est.csv")
aa_std = np.loadtxt(constants_folder / "aa_std_est.csv")
cc_mean = np.loadtxt(constants_folder / "cc_mean_est.csv")
cc_std = np.loadtxt(constants_folder / "cc_std_est.csv")
y_mean = np.loadtxt(constants_folder / "y_mean_est.csv")
y_std = np.loadtxt(constants_folder / "y_std_est.csv")
config.n_x_features = 1402
config.n_cond_features = 24
config.n_hidden_layer_size = 2048
model, optim, weight_scheduler = Model.generate_cINN_old()
Model.load(networks_folder / "model_bp_cINN.pth", optim, model)
model.eval()
outputs = []
for stat in true_stats:
y_array = np.stack([(stat[0] - y_mean) / y_std]*5000, axis=0)
z_array = np.random.randn(5000, 1402)
y_array = torch.tensor(y_array, dtype=torch.float32)
z_array = torch.tensor(z_array, dtype=torch.float32)
y_array = y_array.to(device)
z_array = z_array.to(device)
big_output = model(z_array, c=y_array, rev=True)
big_output = big_output.cpu().detach()
big_output[:, -2] = big_output[:, -2] * aa_std + aa_mean
big_output[:, -1] = big_output[:, -1] * cc_std + cc_mean
outputs.append(big_output)
return outputs
def process_outputs_bp(big_output, true_stats):
n_Rwaves = true_stats[2]
aa = np.array(big_output[:, -2])
cc = np.array(big_output[:, -1])
output_bp = []
output_constants = []
for i in range(len(big_output)):
if aa[i] < 188 or aa[i] > 400 or cc[i] < 1 or cc[i] > aa[i]:
continue
x_re = big_output[:, :-2][i].reshape((175, 8))
output_bp_sub = []
lvl1 = []
lvl2 = []
lvl3 = []
for j in range(100):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl1.append(nearest)
else:
lvl1.append(-1)
for j in range(100, 150):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl2.append(nearest)
else:
lvl2.append(-1)
for j in range(150, 175):
nearest = find_nearest(x_re[j], 1)
if np.abs(x_re[j][nearest] - 1) < 0.5:
lvl3.append(nearest)
else:
lvl3.append(-1)
lvl1_elist = np.where(np.array(lvl1) == -1)[0]
lvl2_elist = np.where(np.array(lvl2) == -1)[0]
lvl3_elist = np.where(np.array(lvl3) == -1)[0]
lvl1_end = len(lvl1)
lvl2_end = len(lvl2)
lvl3_end = len(lvl3)
if len(lvl1_elist) != 0:
lvl1_end = lvl1_elist[0]
if len(np.where(np.array(lvl1[lvl1_end:]) == -1)[0]) != len(lvl1[lvl1_end:]):
continue
if len(lvl2_elist) != 0:
lvl2_end = lvl2_elist[0]
if len(np.where(np.array(lvl2[lvl2_end:]) == -1)[0]) != len(lvl2[lvl2_end:]):
continue
if len(lvl3_elist) != 0:
lvl3_end = lvl3_elist[0]
if len(np.where(np.array(lvl3[lvl3_end:]) == -1)[0]) != len(lvl3[lvl3_end:]):
continue
output_bp_sub.append(lvl1[:lvl1_end])
output_bp_sub.append(lvl2[:lvl2_end])
output_bp_sub.append(lvl3[:lvl3_end])
output_bp.append(output_bp_sub)
output_constants.append([float(aa[i]), float(cc[i])])
filtered_bp = []
for i in range(len(output_bp)):
bp, bp_type = datagen.check_block_pattern_alt(output_bp[i], n_Rwaves)
if len(bp) != 0:
filtered_bp.append([bp, bp_type, output_constants[i]])
return (filtered_bp, true_stats)
def get_outputs_bp_recurrent(true_stats):
with torch.no_grad():
aa_mean = np.loadtxt(constants_folder / "aa_mean_est.csv")
aa_std = np.loadtxt(constants_folder / "aa_std_est.csv")
cc_mean = np.loadtxt(constants_folder / "cc_mean_est.csv")
cc_std = np.loadtxt(constants_folder / "cc_std_est.csv")
y_mean = np.loadtxt(constants_folder / "y_mean_est.csv")
y_std = np.loadtxt(constants_folder / "y_std_est.csv")
config.n_x_features = 26
config.n_cond_features = 24
config.rnn_layers = 2
config.hidden_size = 64
model_rcINN, optim_rcINN, weight_scheduler_rcINN = Model.generate_rcINN_old()
config.n_x_features = 188
config.n_hidden_layer_size = 512
model_seq, optim_seq, weight_scheduler_seq = Model.generate_cINN_old()
Model.load(networks_folder / "model_bp_rcINN.pth",
optim_rcINN, model_rcINN)
Model.load(networks_folder / "model_bp_sequence.pth",
optim_seq, model_seq)
model_rcINN.eval()
model_seq.eval()
seq_len_total = []
for stat in true_stats:
y_seq = np.stack([(stat[0] - y_mean) / y_std]*1000, axis=0)
z_seq = np.random.randn(1000, 188)
y_seq = torch.tensor(y_seq, dtype=torch.float32)
z_seq = torch.tensor(z_seq, dtype=torch.float32)
y_seq = y_seq.to(device)
z_seq = z_seq.to(device)
output_seq = model_seq(z_seq, c=y_seq, rev=True)
output_seq = output_seq.cpu().detach()
seq_lengths = []
for i in range(len(output_seq)):
seq_lengths.append(find_nearest(output_seq[i], 1) + 6)
seq_len_total.append(seq_lengths)
final_lengths = []
for lens in seq_len_total:
occurence = []
len_track = []
for len_i in lens:
if len_i not in len_track:
len_track.append(len_i)
counter = 0
for len_j in lens:
if len_j == len_i:
counter += 1
occurence.append(counter)
occurence = np.array(occurence)
len_track = np.array(len_track)
idx_occ = np.argsort(occurence)[::-1]
len_track = len_track[idx_occ]
final_lengths.append(len_track[:10])
outputs = []
for j in range(len(final_lengths)):
big_output = []
for seq_len in final_lengths[j]:
y_array = np.stack(
[(true_stats[j][0] - y_mean) / y_std]*seq_len, axis=1)
y_array = np.stack([y_array]*500, axis=0)
z_array = np.random.randn(500, 26, seq_len)
y_array = torch.tensor(y_array, dtype=torch.float32)
z_array = torch.tensor(z_array, dtype=torch.float32)
y_array = y_array.to(device)
z_array = z_array.to(device)
output = model_rcINN(z_array, c=y_array,
rev=True, recurrent=True)
output = output.cpu().detach()
output[:, -2, :] = output[:, -2, :] * aa_std + aa_mean
output[:, -1, :] = output[:, -1, :] * cc_std + cc_mean
big_output.append(output)
outputs.append(big_output)
return outputs
def process_outputs_bp_recurrent(outputs, true_stats):
n_Rwaves = true_stats[2]
filtered_bp = []
for i in range(len(outputs)):
for j in range(len(outputs[i])):
outputs_ij = np.array(outputs[i][j])
aa_i = float(np.mean(outputs_ij[-2]))
cc_i = float(np.mean(outputs_ij[-1]))
if aa_i < 188 or aa_i > 400 or cc_i < 1 or cc_i > aa_i:
continue
outputs_ij = outputs_ij.T
seq_i = []
for time_step in outputs_ij:
lvl1 = find_nearest(time_step[0:8], 1)
lvl2 = find_nearest(time_step[8:16], 1)
lvl3 = find_nearest(time_step[16:24], 1)
seq_i.append([lvl1, lvl2, lvl3])
bp_i = datagen.seq_to_block_pattern(seq_i)
bp_i_checked, bp_type = datagen.check_block_pattern_alt(
bp_i, n_Rwaves)
if len(bp_i_checked) != 0:
filtered_bp.append([bp_i_checked, bp_type, [aa_i, cc_i]])
return (filtered_bp, true_stats)
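# Each recurrent output step carries three 8-way blocks (conduction levels
# 1-3) decoded with find_nearest; the resulting level sequence is mapped back
# to a block pattern via datagen.seq_to_block_pattern and validated with
# check_block_pattern_alt, while aa/cc are averaged over time and
# range-checked.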
def get_outputs_bp_recurrent_matching(true_stats, mp=True):
    with torch.no_grad():
        # per-feature normalization constants estimated on the training set
        aa_mean = np.loadtxt(constants_folder / "aa_mean_est.csv")
        aa_std = np.loadtxt(constants_folder / "aa_std_est.csv")
        cc_mean = np.loadtxt(constants_folder / "cc_mean_est.csv")
        cc_std = np.loadtxt(constants_folder / "cc_std_est.csv")
        y_mean = np.loadtxt(constants_folder / "y_mean_est.csv")
        y_std = np.loadtxt(constants_folder / "y_std_est.csv")
        cond_mean = np.loadtxt(constants_folder / "cond_mean_est.csv")
        cond_std = np.loadtxt(constants_folder / "cond_std_est.csv")
        # build the three networks, reconfiguring the shared config in between
        config.n_x_features = 26
        config.n_cond_features = 2
        config.rnn_layers = 2
        config.hidden_size = 64
        model_rcINN, optim_rcINN, weight_scheduler_rcINN = Model.generate_rcINN_old()
        config.n_x_features = 188
        config.n_cond_features = 24
        config.n_hidden_layer_size = 512
        model_seq, optim_seq, weight_scheduler_seq = Model.generate_cINN_old()
        config.n_x_features = 192
        model_matching, optim_matching, weight_scheduler_matching = Model.generate_cINN_old()
        Model.load(networks_folder / "model_bp_rcINN_matching.pth",
                   optim_rcINN, model_rcINN)
        Model.load(networks_folder / "model_bp_sequence.pth",
                   optim_seq, model_seq)
        Model.load(networks_folder / "model_bp_matching.pth",
                   optim_matching, model_matching)
        model_rcINN.eval()
        model_seq.eval()
        model_matching.eval()
        # sample candidate sequence lengths from the sequence cINN
        seq_len_total = []
        for stat in true_stats:
            y_seq = np.stack([(stat[0] - y_mean) / y_std] * 1000, axis=0)
            z_seq = np.random.randn(1000, 188)
            y_seq = torch.tensor(y_seq, dtype=torch.float32)
            z_seq = torch.tensor(z_seq, dtype=torch.float32)
            y_seq = y_seq.to(device)
            z_seq = z_seq.to(device)
            output_seq = model_seq(z_seq, c=y_seq, rev=True)
            output_seq = output_seq.cpu().detach()
            seq_lengths = []
            for i in range(len(output_seq)):
                seq_lengths.append(find_nearest(output_seq[i], 1) + 6)
            seq_len_total.append(seq_lengths)
        # keep the ten most frequent candidate lengths per input
        final_lengths = []
        for lens in seq_len_total:
            occurence = []
            len_track = []
            for len_i in lens:
                if len_i not in len_track:
                    len_track.append(len_i)
                    counter = 0
                    for len_j in lens:
                        if len_j == len_i:
                            counter += 1
                    occurence.append(counter)
            occurence = np.array(occurence)
            len_track = np.array(len_track)
            idx_occ = np.argsort(occurence)[::-1]
            len_track = len_track[idx_occ]
            final_lengths.append(len_track[:10])
        # sample candidate matchings from the matching cINN
        match_totals = []
        for k in range(len(true_stats)):
            y_matching = np.stack(
                [(true_stats[k][0] - y_mean) / y_std] * 5000, axis=0)
            z_matching = np.random.randn(5000, 192)
            y_matching = torch.tensor(y_matching, dtype=torch.float32)
            z_matching = torch.tensor(z_matching, dtype=torch.float32)
            y_matching = y_matching.to(device)
            z_matching = z_matching.to(device)
            output_matching = model_matching(
                z_matching, c=y_matching, rev=True)
            output_matching = output_matching.cpu().detach()
            match_totals.append((output_matching, true_stats[k][2], k))
        if mp:
            with Pool(os.cpu_count() - 1) as pool:
                processed_matches = pool.map(process_matching, match_totals)
            processed_matches = sorted(processed_matches, key=lambda x: x[1])
        else:
            processed_matches = []
            processed_matches.append(process_matching(match_totals[0]))
        # rank the candidate matchings by how often they were sampled
        final_matches = []
        for matches in processed_matches:
            occurence = []
            match_track = []
            for match_i in matches[0]:
                if match_i not in match_track:
                    match_track.append(match_i)
                    counter = 0
                    for match_j in matches[0]:
                        if match_j == match_i:
                            counter += 1
                    occurence.append(counter)
            occurence = np.array(occurence)
            match_track = np.array(match_track)
            idx_occ = np.argsort(occurence)[::-1]
            match_track = match_track[idx_occ]
            final_matches.append(match_track)
        # pair each candidate length with the matchings of that length
        paired_totals = []
        for k in range(len(final_lengths)):
            paired_matchings = []
            for i in range(len(final_lengths[k])):
                sub_match = []
                for j in range(len(final_matches[k])):
                    if final_matches[k][j][-1] + 1 == final_lengths[k][i]:
                        sub_match.append(final_matches[k][j])
                paired_matchings.append((final_lengths[k][i], sub_match))
            paired_totals.append(paired_matchings)
        # run the recurrent cINN on each (length, matching) pair
        outputs = []
        for k in range(len(paired_totals)):
            y_rcINN = []
            z_rcINN = []
            for i in range(len(paired_totals[k])):
                if len(paired_totals[k][i][1]) == 0:
                    continue
                z_stack = []
                cond_stack = []
                counter = 0
                for j in range(len(paired_totals[k][i][1])):
                    z_stack.append(np.random.randn(
                        50, 26, paired_totals[k][i][0]))
                    cond_stack.append(np.stack([datagen.y_to_cond(
                        paired_totals[k][i][1][j], paired_totals[k][i][0], true_stats[k][0])] * 50, axis=0))
                    counter += 1
                    if counter == 10:
                        break
                z_stack = np.concatenate(z_stack, axis=0)
                cond_stack = np.concatenate(cond_stack, axis=0)
                y_rcINN.append(cond_stack)
                z_rcINN.append(z_stack)
            for i in range(len(y_rcINN)):
                # normalize the two condition channels
                y_rcINN[i] = torch.tensor(y_rcINN[i], dtype=torch.float32)
                y_rcINN[i][:, 0, :] = (
                    y_rcINN[i][:, 0, :] - cond_mean[0]) / cond_std[0]
                y_rcINN[i][:, 1, :] = (
                    y_rcINN[i][:, 1, :] - cond_mean[1]) / cond_std[1]
                z_rcINN[i] = torch.tensor(z_rcINN[i], dtype=torch.float32)
            big_output = []
            for i in range(len(y_rcINN)):
                y_rcINN[i] = y_rcINN[i].to(device)
                z_rcINN[i] = z_rcINN[i].to(device)
                output_rcINN = model_rcINN(
                    z_rcINN[i], c=y_rcINN[i], rev=True, recurrent=True)
                output_rcINN = output_rcINN.cpu().detach()
                # de-normalize the aa/cc output channels
                output_rcINN[:, -2, :] = output_rcINN[:, -2, :] * aa_std + aa_mean
                output_rcINN[:, -1, :] = output_rcINN[:, -1, :] * cc_std + cc_mean
                big_output.append(output_rcINN)
            outputs.append(big_output)
    return outputs


def process_outputs_bp_recurrent_matching(outputs, true_stats):
    n_Rwaves = true_stats[2]
    filtered_bp = []
    for i in range(len(outputs)):
        for j in range(len(outputs[i])):
            outputs_ij = np.array(outputs[i][j])
            aa_i = float(np.mean(outputs_ij[-2]))
            cc_i = float(np.mean(outputs_ij[-1]))
            if aa_i < 188 or aa_i > 400 or cc_i < 1 or cc_i > aa_i:
                continue
            outputs_ij = outputs_ij.T
            seq_i = []
            for time_step in outputs_ij:
                lvl1 = find_nearest(time_step[0:8], 1)
                lvl2 = find_nearest(time_step[8:16], 1)
                lvl3 = find_nearest(time_step[16:24], 1)
                seq_i.append([lvl1, lvl2, lvl3])
            bp_i = datagen.seq_to_block_pattern(seq_i)
            bp_i_checked, bp_type = datagen.check_block_pattern_alt(
                bp_i, n_Rwaves)
            if len(bp_i_checked) != 0:
                filtered_bp.append([bp_i_checked, bp_type, [aa_i, cc_i]])
    return (filtered_bp, true_stats)


def get_solution_mp(intervals, network_name):
    intervals = np.array(intervals)
    stats = []
    for interval in intervals:
        y_i = np.zeros(24)
        n_Rwaves = len(interval) + 1
        y_i[:(n_Rwaves - 1)] = interval
        stats_i = [y_i, interval, n_Rwaves]
        stats.append(stats_i)
    if network_name == "bp_cINN":
        outputs = get_outputs_bp(stats)
        pairs = [(x, y) for x, y in zip(outputs, stats)]
        with Pool(os.cpu_count() - 1) as pool:
            filtered_outputs = pool.starmap(process_outputs_bp, pairs)
        for filter_pair in filtered_outputs:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=False, splitter=False)
    if network_name == "bp_cINN_multi":
        outputs_1 = get_outputs_splitter(stats, "1")
        outputs_2a = get_outputs_splitter(stats, "2a")
        outputs_2b = get_outputs_splitter(stats, "2b")
        outputs_2c = get_outputs_splitter(stats, "2c")
        outputs_3 = get_outputs_splitter(stats, "3")
        inputs1 = [(x, y, "1", i) for x, y, i in zip(
            outputs_1, stats, range(len(stats)))]
        inputs2a = [(x, y, "2a", i) for x, y, i in zip(
            outputs_2a, stats, range(len(stats)))]
        inputs2b = [(x, y, "2b", i) for x, y, i in zip(
            outputs_2b, stats, range(len(stats)))]
        inputs2c = [(x, y, "2c", i) for x, y, i in zip(
            outputs_2c, stats, range(len(stats)))]
        inputs3 = [(x, y, "3", i) for x, y, i in zip(
            outputs_3, stats, range(len(stats)))]
        filtered_bp_total = []
        with Pool(os.cpu_count() - 1) as pool:
            filtered_bp1 = pool.map(process_outputs_splitter, inputs1)
            filtered_bp2a = pool.map(process_outputs_splitter, inputs2a)
            filtered_bp2b = pool.map(process_outputs_splitter, inputs2b)
            filtered_bp2c = pool.map(process_outputs_splitter, inputs2c)
            filtered_bp3 = pool.map(process_outputs_splitter, inputs3)
        filtered_bp1 = sorted(filtered_bp1, key=lambda x: x[-1])
        filtered_bp2a = sorted(filtered_bp2a, key=lambda x: x[-1])
        filtered_bp2b = sorted(filtered_bp2b, key=lambda x: x[-1])
        filtered_bp2c = sorted(filtered_bp2c, key=lambda x: x[-1])
        filtered_bp3 = sorted(filtered_bp3, key=lambda x: x[-1])
        for i in range(len(filtered_bp1)):
            sub = []
            sub.extend(filtered_bp1[i][0])
            sub.extend(filtered_bp2a[i][0])
            sub.extend(filtered_bp2b[i][0])
            sub.extend(filtered_bp2c[i][0])
            sub.extend(filtered_bp3[i][0])
            filtered_bp_total.append((sub, filtered_bp1[i][1]))
        for filter_pair in filtered_bp_total:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=False, splitter=True)
    if network_name == "bp_rcINN":
        outputs = get_outputs_bp_recurrent(stats)
        pairs = [(x, y) for x, y in zip(outputs, stats)]
        with Pool(os.cpu_count() - 1) as pool:
            filtered_outputs = pool.starmap(
                process_outputs_bp_recurrent, pairs)
        for filter_pair in filtered_outputs:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=False, splitter=False)
    if network_name == "bp_rcINN_matching":
        outputs = get_outputs_bp_recurrent_matching(stats, mp=True)
        pairs = [(x, y) for x, y in zip(outputs, stats)]
        with Pool(os.cpu_count() - 1) as pool:
            filtered_outputs = pool.starmap(
                process_outputs_bp_recurrent_matching, pairs)
        for filter_pair in filtered_outputs:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=False, splitter=False)
    if network_name == "signal_cINN":
        outputs = get_outputs_signals(stats)
        pairs = [(x, y) for x, y in zip(outputs, stats)]
        with Pool(os.cpu_count() - 1) as pool:
            filtered_outputs = pool.starmap(process_outputs_signals, pairs)
        for filter_pair in filtered_outputs:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=True, splitter=False)
    if network_name == "signal_rcINN":
        outputs = get_outputs_signals_recurrent(stats)
        pairs = [(x, y) for x, y in zip(outputs, stats)]
        with Pool(os.cpu_count() - 1) as pool:
            filtered_outputs = pool.starmap(
                process_outputs_signals_recurrent, pairs)
        for filter_pair in filtered_outputs:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=True, splitter=False)
    if network_name == "signal_rcINN_matching":
        outputs = get_outputs_signals_recurrent_matching(stats, mp=True)
        pairs = [(x, y) for x, y in zip(outputs, stats)]
        with Pool(os.cpu_count() - 1) as pool:
            filtered_outputs = pool.starmap(
                process_outputs_signals_recurrent_matching, pairs)
        for filter_pair in filtered_outputs:
            print_stats(filter_pair[0], filter_pair[1],
                        network_name, signals=True, splitter=False)


def get_solution(intervals, network_name):
    intervals = np.array(intervals)
    y_i = np.zeros(24)
    n_Rwaves = len(intervals) + 1
    y_i[:(n_Rwaves - 1)] = intervals
    stats = [y_i, intervals, n_Rwaves]
    if network_name == "bp_cINN":
        outputs = get_outputs_bp([stats])
        filter_output = process_outputs_bp(outputs[0], stats)
        print_stats(filter_output[0], stats,
                    network_name, signals=False, splitter=False)
    if network_name == "bp_cINN_multi":
        outputs_1 = get_outputs_splitter([stats], "1")
        outputs_2a = get_outputs_splitter([stats], "2a")
        outputs_2b = get_outputs_splitter([stats], "2b")
        outputs_2c = get_outputs_splitter([stats], "2c")
        outputs_3 = get_outputs_splitter([stats], "3")
        filter_output_1 = process_outputs_splitter(
            (outputs_1[0], stats, "1", 0))
        filter_output_2a = process_outputs_splitter(
            (outputs_2a[0], stats, "2a", 0))
        filter_output_2b = process_outputs_splitter(
            (outputs_2b[0], stats, "2b", 0))
        filter_output_2c = process_outputs_splitter(
            (outputs_2c[0], stats, "2c", 0))
        filter_output_3 = process_outputs_splitter(
            (outputs_3[0], stats, "3", 0))
        filter_total = []
        filter_total.extend(filter_output_1[0])
        filter_total.extend(filter_output_2a[0])
        filter_total.extend(filter_output_2b[0])
        filter_total.extend(filter_output_2c[0])
        filter_total.extend(filter_output_3[0])
        print_stats(filter_total, stats, network_name,
                    signals=False, splitter=True)
    if network_name == "bp_rcINN":
        outputs = get_outputs_bp_recurrent([stats])
        filter_output = process_outputs_bp_recurrent(outputs[0], stats)
        print_stats(filter_output[0], stats,
                    network_name, signals=False, splitter=False)
    if network_name == "bp_rcINN_matching":
        outputs = get_outputs_bp_recurrent_matching([stats], mp=False)
        filter_output = process_outputs_bp_recurrent_matching(
            outputs[0], stats)
        print_stats(filter_output[0], stats,
                    network_name, signals=False, splitter=False)
    if network_name == "signal_cINN":
        outputs = get_outputs_signals([stats])
        filter_output = process_outputs_signals(outputs[0], stats)
        print_stats(filter_output[0], stats,
                    network_name, signals=True, splitter=False)
    if network_name == "signal_rcINN":
        outputs = get_outputs_signals_recurrent([stats])
        filter_output = process_outputs_signals_recurrent(outputs[0], stats)
        print_stats(filter_output[0], stats,
                    network_name, signals=True, splitter=False)
    if network_name == "signal_rcINN_matching":
        outputs = get_outputs_signals_recurrent_matching([stats], mp=False)
        filter_output = process_outputs_signals_recurrent_matching(
            outputs[0], stats)
        print_stats(filter_output[0], stats,
                    network_name, signals=True, splitter=False)


def main():
    y = datagen.get_random_y()
    intervals = y[1][0:24]
    network_name = "bp_rcINN_matching"
    get_solution(intervals, network_name)


if __name__ == "__main__":
    main()
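The driver above relies on a find_nearest helper defined elsewhere in the project; a minimal sketch of a plausible implementation (the name is the project's, but the argmin behaviour is an assumption, not its confirmed code):

import numpy as np


def find_nearest(array, value):
    # Index of the element closest to `value`; callers above offset this
    # index (e.g. + 6) to turn it into a sequence length.
    array = np.asarray(array)
    return int(np.abs(array - value).argmin())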
| 41.047418
| 123
| 0.520102
| 11,747
| 87,431
| 3.607219
| 0.026219
| 0.013098
| 0.019116
| 0.019163
| 0.908812
| 0.893968
| 0.859584
| 0.852174
| 0.832114
| 0.827984
| 0
| 0.037144
| 0.345358
| 87,431
| 2,129
| 124
| 41.066698
| 0.703194
| 0.00231
| 0
| 0.776381
| 0
| 0
| 0.042684
| 0.029251
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018043
| false
| 0
| 0.008748
| 0
| 0.041553
| 0.008201
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5929438cc19f5d42633a7a5c9936c8941fc20c08
| 93
|
py
|
Python
|
python/learn/base/module/l1/pack/big/b2.py
|
qrsforever/workspace
|
53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f
|
[
"MIT"
] | 2
|
2017-06-07T03:20:42.000Z
|
2020-01-07T09:14:26.000Z
|
python/learn/base/module/l1/pack/big/b2.py
|
qrsforever/workspace
|
53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f
|
[
"MIT"
] | null | null | null |
python/learn/base/module/l1/pack/big/b2.py
|
qrsforever/workspace
|
53c7ce7ca7da62c9fbb3d991ae9e4e34d07ece5f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python2.7
print "never run pack/big/b2.py"
def b2_fun(): print "pack/big/b2.py"
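For reference, a Python 3 rendering of the same module (a sketch; the original deliberately targets Python 2.7, where print is a statement):

#!/usr/bin/env python3
print("never run pack/big/b2.py")


def b2_fun():
    print("pack/big/b2.py")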
| 15.5
| 36
| 0.677419
| 19
| 93
| 3.263158
| 0.684211
| 0.225806
| 0.290323
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 0.11828
| 93
| 5
| 37
| 18.6
| 0.695122
| 0.204301
| 0
| 0
| 0
| 0
| 0.520548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3cb268cb79da96d631331961e215e6799ebc1016
| 5,662
|
py
|
Python
|
api/migrations/0001_initial.py
|
peterzernia/wanderlist
|
5fc2338ce52be146b051d89f336ec56d48d72de0
|
[
"MIT"
] | 27
|
2018-12-01T13:53:04.000Z
|
2021-12-27T18:03:09.000Z
|
api/migrations/0001_initial.py
|
peterzernia/countries
|
5fc2338ce52be146b051d89f336ec56d48d72de0
|
[
"MIT"
] | 11
|
2019-08-28T16:51:39.000Z
|
2021-06-10T17:41:49.000Z
|
api/migrations/0001_initial.py
|
peterzernia/countries
|
5fc2338ce52be146b051d89f336ec56d48d72de0
|
[
"MIT"
] | 3
|
2018-12-02T19:14:56.000Z
|
2021-10-30T16:59:51.000Z
|
# Generated by Django 2.1.2 on 2018-10-29 11:20

import django.contrib.postgres.fields.jsonb
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('topLevelDomain', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('alpha2Code', models.CharField(blank=True, max_length=255, null=True)),
                ('alpha3Code', models.CharField(blank=True, max_length=255, null=True)),
                ('callingCodes', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('capital', models.CharField(blank=True, max_length=255, null=True)),
                ('altSpellings', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('region', models.CharField(blank=True, max_length=255, null=True)),
                ('subregion', models.CharField(blank=True, max_length=255, null=True)),
                ('population', models.IntegerField(blank=True, null=True)),
                ('latlng', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('demonym', models.CharField(blank=True, max_length=255, null=True)),
                ('area', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
                ('gini', models.DecimalField(blank=True, decimal_places=1, max_digits=3, null=True)),
                ('timezones', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('borders', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('nativeName', models.CharField(blank=True, max_length=255, null=True)),
                ('numericCode', models.CharField(blank=True, max_length=255, null=True)),
                ('flag', models.CharField(blank=True, max_length=255, null=True)),
                ('cioc', models.CharField(blank=True, max_length=255, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='currencies',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(blank=True, max_length=255, null=True)),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('symbol', models.CharField(blank=True, max_length=255, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='languages',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('iso639_1', models.CharField(blank=True, max_length=255, null=True)),
                ('iso639_2', models.CharField(blank=True, max_length=255, null=True)),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('nativeName', models.CharField(blank=True, max_length=255, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='regionalBlocs',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('acronym', models.CharField(blank=True, max_length=255, null=True)),
                ('name', models.CharField(blank=True, max_length=255, null=True)),
                ('otherAcronyms', models.CharField(blank=True, max_length=255, null=True)),
                ('otherNames', models.CharField(blank=True, max_length=255, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='translations',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('de', models.CharField(blank=True, max_length=255, null=True)),
                ('es', models.CharField(blank=True, max_length=255, null=True)),
                ('fr', models.CharField(blank=True, max_length=255, null=True)),
                ('ja', models.CharField(blank=True, max_length=255, null=True)),
                ('it', models.CharField(blank=True, max_length=255, null=True)),
                ('br', models.CharField(blank=True, max_length=255, null=True)),
                ('pt', models.CharField(blank=True, max_length=255, null=True)),
                ('nl', models.CharField(blank=True, max_length=255, null=True)),
                ('hr', models.CharField(blank=True, max_length=255, null=True)),
                ('fa', models.CharField(blank=True, max_length=255, null=True)),
            ],
        ),
        migrations.AddField(
            model_name='country',
            name='currencies',
            field=models.ManyToManyField(to='api.currencies'),
        ),
        migrations.AddField(
            model_name='country',
            name='languages',
            field=models.ManyToManyField(to='api.languages'),
        ),
        migrations.AddField(
            model_name='country',
            name='regionalBlocs',
            field=models.ManyToManyField(to='api.regionalBlocs'),
        ),
        migrations.AddField(
            model_name='country',
            name='translations',
            field=models.ManyToManyField(to='api.translations'),
        ),
    ]
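A portability note: from Django 3.1 onward, JSONField is available as django.db.models.JSONField on every supported backend, so a regenerated version of this migration would not need the postgres-specific import. A sketch of the equivalent declaration (illustrative only, not part of this migration):

from django.db import models

# backend-agnostic equivalent of the jsonb fields above (Django 3.1+)
top_level_domain = models.JSONField(blank=True, null=True)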
| 52.915888
| 114
| 0.588131
| 600
| 5,662
| 5.455
| 0.16
| 0.112741
| 0.195539
| 0.234647
| 0.817293
| 0.76963
| 0.701803
| 0.701803
| 0.701803
| 0.446379
| 0
| 0.03018
| 0.262628
| 5,662
| 106
| 115
| 53.415094
| 0.753772
| 0.007948
| 0
| 0.515152
| 1
| 0
| 0.082102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020202
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3cc0f5f031d7b1cd0c0a315d4e8faf16d5b4f3e0
| 3,375
|
py
|
Python
|
models_preprocessing.py
|
agoila/strong-lenses
|
88e2821629db0372c2c32641a359c7e700af45f9
|
[
"Apache-2.0"
] | 1
|
2019-03-10T22:34:59.000Z
|
2019-03-10T22:34:59.000Z
|
models_preprocessing.py
|
agoila/strong-lenses
|
88e2821629db0372c2c32641a359c7e700af45f9
|
[
"Apache-2.0"
] | null | null | null |
models_preprocessing.py
|
agoila/strong-lenses
|
88e2821629db0372c2c32641a359c7e700af45f9
|
[
"Apache-2.0"
] | null | null | null |
from keras.models import Sequential
from keras.layers import Flatten, Dense, Activation
from keras.layers.convolutional import Conv2D, MaxPooling2D
from keras.optimizers import Adam
from keras.regularizers import l2


def compiledConvnet(input_shape=(101, 101, 4)):
    model = convnet(input_shape)
    optimizer = Adam(lr=.0001, decay=5e-5)
    model.compile(optimizer=optimizer,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model


def convnet(input_shape=(101, 101, 4)):
    model = Sequential()
    model.add(Conv2D(64, (3, 3), strides=(2, 2), activation='softplus',
                     input_shape=input_shape))
    model.add(Conv2D(32, (3, 3), strides=(2, 2), activation='softplus'))
    model.add(Conv2D(16, (3, 3), activation='softplus'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Flatten())
    model.add(Dense(128, activation='softplus'))
    model.add(Dense(32, activation='softplus'))
    model.add(Dense(1, activation='sigmoid'))
    return model


def compiled_maxpool_simpler_1(input_shape=(101, 101, 4)):
    model = maxpool_simpler_1(input_shape)
    optimizer = Adam(lr=.0001, decay=5e-5)
    model.compile(optimizer=optimizer,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model


def maxpool_simpler_1(input_shape=(101, 101, 4)):
    model = Sequential()
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), input_shape=input_shape))
    model.add(Conv2D(32, (3, 3), strides=(2, 2), activation='softplus',
                     input_shape=input_shape))
    model.add(Conv2D(16, (3, 3), strides=(2, 2), activation='softplus'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Flatten())
    model.add(Dense(128, activation='softplus'))
    model.add(Dense(32, activation='softplus'))
    model.add(Dense(1, activation='sigmoid'))
    return model


def compiled_maxpool_convnet(input_shape=(101, 101, 4)):
    # compile the max-pooling variant of the convnet
    model = maxpool_convnet(input_shape)
    optimizer = Adam(lr=.0001, decay=5e-5)
    model.compile(optimizer=optimizer,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model


def maxpool_convnet(input_shape=(101, 101, 4)):
    model = Sequential()
    model.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4), input_shape=input_shape))
    model.add(Conv2D(64, (3, 3), strides=(2, 2), activation='softplus'))
    model.add(Conv2D(32, (3, 3), strides=(2, 2), activation='softplus'))
    model.add(Conv2D(16, (3, 3), activation='softplus'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Flatten())
    model.add(Dense(128, activation='softplus'))
    model.add(Dense(32, activation='softplus'))
    model.add(Dense(1, activation='sigmoid'))
    return model


def compiledRegularizedConvnet(input_shape=(101, 101, 4)):
    model = regularizedConvnet(input_shape)
    optimizer = Adam(lr=.0001, decay=5e-5)
    model.compile(optimizer=optimizer,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model


reg = 0.5


def regularizedConvnet(input_shape=(101, 101, 4)):
    model = Sequential()
    model.add(Conv2D(64, (3, 3), strides=(2, 2), activation='softplus',
                     input_shape=input_shape))
    model.add(Conv2D(32, (3, 3), strides=(2, 2), activation='softplus'))
    model.add(Conv2D(16, (3, 3), activation='softplus'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Flatten())
    model.add(Dense(128, activation='softplus', kernel_regularizer=l2(reg)))
    model.add(Dense(32, activation='softplus'))
    model.add(Dense(1, activation='sigmoid'))
    return model
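A minimal usage sketch for these builders (assuming a Keras 2.x environment, where Adam still accepts lr and decay):

if __name__ == "__main__":
    # sigmoid head: binary classifier over 101x101, 4-channel inputs
    model = compiledConvnet(input_shape=(101, 101, 4))
    model.summary()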
| 32.76699
| 81
| 0.722667
| 482
| 3,375
| 4.970954
| 0.122407
| 0.110184
| 0.14399
| 0.162771
| 0.872705
| 0.866444
| 0.856427
| 0.84808
| 0.845159
| 0.824708
| 0
| 0.070511
| 0.100741
| 3,375
| 103
| 82
| 32.76699
| 0.718946
| 0
| 0
| 0.731707
| 0
| 0
| 0.085308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0
| 0.060976
| 0
| 0.256098
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3cdecd7ce140d5c2a62159f85cf71ec143a3cccd
| 110
|
py
|
Python
|
formulaic/tests/__init__.py
|
Govexec/django-formulaic
|
b9516a963673cb21279a2ecdf62a199b9203d538
|
[
"MIT"
] | 1
|
2021-08-20T04:21:20.000Z
|
2021-08-20T04:21:20.000Z
|
formulaic/tests/__init__.py
|
Govexec/django-formulaic
|
b9516a963673cb21279a2ecdf62a199b9203d538
|
[
"MIT"
] | 4
|
2020-12-05T00:31:40.000Z
|
2021-09-22T20:06:53.000Z
|
formulaic/tests/__init__.py
|
Govexec/django-formulaic
|
b9516a963673cb21279a2ecdf62a199b9203d538
|
[
"MIT"
] | 1
|
2020-12-04T19:16:36.000Z
|
2020-12-04T19:16:36.000Z
|
from .test_forms import *
from .test_models import *
from .test_views import *
from .test_csv_export import *
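The star imports pull every TestCase subclass into the package namespace so the Django test runner discovers all four suites at once; with a standard manage.py layout (an assumption, not shown here) the whole package runs with:

    python manage.py test formulaic.tests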
| 22
| 30
| 0.781818
| 17
| 110
| 4.764706
| 0.470588
| 0.395062
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 110
| 4
| 31
| 27.5
| 0.861702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a71fbc651bcd48513df3f75072e0656588b31c89
| 24,029
|
py
|
Python
|
generators/pll-gen/pymodules/run_pex_flow.py
|
idea-fasoc/fasoc
|
5a1fc8cf980b24a48b17f4447f13fb50d49e366a
|
[
"MIT"
] | 48
|
2019-09-16T09:49:54.000Z
|
2022-02-09T20:59:10.000Z
|
generators/pll-gen/pymodules/run_pex_flow.py
|
idea-fasoc/fasoc
|
5a1fc8cf980b24a48b17f4447f13fb50d49e366a
|
[
"MIT"
] | 18
|
2019-10-15T04:17:35.000Z
|
2021-05-25T00:12:52.000Z
|
generators/pll-gen/pymodules/run_pex_flow.py
|
idea-fasoc/fasoc
|
5a1fc8cf980b24a48b17f4447f13fb50d49e366a
|
[
"MIT"
] | 8
|
2019-10-15T17:27:41.000Z
|
2022-01-26T20:42:07.000Z
|
import os
import glob
import shutil
import subprocess as sp
def gen_post_pex_netlist(platform, designName, formatDir, flowDir, extDir, calibreRulesDir, wellpin, spectre):
    # v2lvs netlist: (1) wellpin present in CADRE => copy the netlist from the CADRE results; (2) no wellpin in CADRE => rerun v2lvs with a modified Verilog netlist
if wellpin==1:
for file in glob.glob(flowDir+'/results/calibre/lvs/_'+designName+'*.sp'):
shutil.copy(file, extDir+'/sch/'+designName+'.spi')
else:
p = sp.Popen(['cp', flowDir+'/results/innovus/'+designName+'_lvs.v', flowDir+'/results/innovus/'+designName+'_lvs_well.v'])
p.wait()
p = sp.Popen(['vi', flowDir+'/results/innovus/'+designName+'_lvs_well.v', \
'-c', '%s/.VDD(VDD)/.VDD(VDD), .VNW(VDD), .VPW(VSS)/g | wq'])
p.wait()
cdlInclude = ''
cdlParse = ''
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.cdlList', 'r') as file:
filedata = file.readlines()
for line in filedata:
cdlInclude = cdlInclude + ' -s ' + line.rstrip()
cdlParse = cdlParse + ' -lsr ' + line.rstrip()
p = sp.Popen(['v2lvs', cdlParse, '-lsr', flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-lsr', flowDir+'/blocks/dco_FC/export/dco_FC.cdl',
cdlInclude, '-s',flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-s',flowDir+'/blocks/dco_FC/export/dco_FC.cdl','-v',
flowDir+'/results/innovus/'+designName+'_lvs_well.v',
'-o',extDir+'/sch/'+designName+'.spi','-i','-c','/_'])
p.wait()
# Copy the merged gds file to extraction directory
p = sp.Popen(['cp', flowDir+'/results/calibre/'+designName+'.merged.gds.gz', \
extDir+'/layout/'+designName+'.gds.gz'])
p.wait()
# Copy runsets
shutil.copy(formatDir+'pex.runset.'+platform, extDir+'/runsets/pex.runset.'+platform)
# Clean the space for PEX
if os.path.isfile(extDir + '/run/svdb/' + designName + '.dv'):
os.remove(extDir + '/run/svdb/' + designName + '.dv')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.extf'):
os.remove(extDir + '/run/svdb/' + designName + '.extf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.lvsf'):
os.remove(extDir + '/run/svdb/' + designName + '.lvsf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.pdsp'):
os.remove(extDir + '/run/svdb/' + designName + '.pdsp')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.sp'):
os.remove(extDir + '/run/svdb/' + designName + '.sp')
if os.path.isdir(extDir + '/run/svdb/' + designName + '.phdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.phdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.xdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.xdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + 'template'):
shutil.rmtree(extDir + '/run/svdb/' + 'template',
ignore_errors=True)
# Set the environment variables
if platform == 'gf12lp':
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.beolStack', 'r') as file:
filedata = file.read()
os.environ['BEOL_STACK'] = filedata.rstrip()
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.techLvsDir', 'r') as file:
filedata = file.read()
os.environ['TECHDIR_LVS'] = filedata.rstrip()
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.techPexDir', 'r') as file:
filedata = file.read()
os.environ['TECHDIR_XACT'] = filedata.rstrip()
os.environ['PEX_RUN'] = 'TRUE'
# Configure the PEX rule files
for file in os.listdir(calibreRulesDir + '/'):
if not os.path.isdir(calibreRulesDir + '/' + file):
shutil.copy2(calibreRulesDir+'/'+file, extDir+'/run/')
with open(extDir+'/runsets/pex.runset.'+platform, 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
if spectre==0:
extRunDir=extDir+'/run/'
filedata = filedata.replace('netlistform', 'HSPICE')
elif spectre==1:
extRunDir=extDir+'/run_scs/'
filedata = filedata.replace('netlistform', 'SPECTRE')
with open(extRunDir+'pex.runset', 'w') as file:
file.write(filedata)
# Run Calibre RCX
if platform == 'gf12lp':
p = sp.Popen(['calibre','-gui','-xact','-batch','-runset',
'pex.runset'],cwd=extRunDir)
p.wait()
else:
p = sp.Popen(['calibre','-gui','-pex','-batch','-runset',
'pex.runset'],cwd=extRunDir)
p.wait()
# tsmc65lp
# p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.rcx_',extDir+'/run/'])
# p.wait()
# p = sp.Popen(['cp',calibreRulesDir+'/calibre.rcx',extDir+'/run/'])
# p.wait()
# p = sp.Popen(['cp',calibreRulesDir+'/rules',extDir+'/run/'])
# p.wait()
# with open(extDir+'/run/_calibre.rcx_', 'r') as file:
# filedata = file.read()
# filedata = filedata.replace('design', designName)
# with open(extDir+'/run/_calibre.rcx_', 'w') as file:
# file.write(filedata)
# p = sp.Popen(['calibre','-xrc','-phdb','-nowait','-turbo','1',
# '_calibre.rcx_'],cwd=extDir+'/run')
# p.wait()
# p = sp.Popen(['calibre','-xrc','-pdb','-rcc','-turbo','1','-nowait',
# '_calibre.rcx_'],cwd=extDir+'/run')
# p.wait()
# p = sp.Popen(['calibre','-xrc','-fmt','-all','-nowait','_calibre.rcx_'],
# cwd=extDir+'/run')
# p.wait()
#------------------------------------------------------------------------------
# LVS and PEX flow for 65nm (extra steps for welltaps issues)
# flowDir should be absolute path
#------------------------------------------------------------------------------
def lvs_pex_65nm(calibreRulesDir,flowDir,extDir,simDir,designName,lvs,pex):
#with open('./tempFiles/platform_config.json') as file:
# jsonConfig = json.load(file)
#
#calibreRulesDir = jsonConfig['calibreRules']
if lvs==1 or pex==1:
# Generate pre PEX netlist and gds files
p = sp.Popen(['cp', flowDir+'/results/innovus/'+designName+'_lvs.v', flowDir+'/results/innovus/'+designName+'_lvs_well.v'])
p.wait()
p = sp.Popen(['vi', flowDir+'/results/innovus/'+designName+'_lvs_well.v', \
'-c', '%s/.VDD(VDD)/.VDD(VDD), .VNW(VDD), .VPW(VSS)/g | wq'])
p.wait()
cdlInclude = ''
cdlParse = ''
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.cdlList', 'r') as file:
filedata = file.readlines()
for line in filedata:
cdlInclude = cdlInclude + ' -s ' + line.rstrip()
cdlParse = cdlParse + ' -lsr ' + line.rstrip()
p = sp.Popen(['v2lvs', cdlParse, '-lsr', flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-lsr', flowDir+'/blocks/dco_FC/export/dco_FC.cdl',
cdlInclude, '-s',flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-s',flowDir+'/blocks/dco_FC/export/dco_FC.cdl','-v',
flowDir+'/results/innovus/'+designName+'_lvs_well.v',
'-o',extDir+'/sch/'+designName+'.spi','-i','-c','/_'])
p.wait()
# NOTE: The exported version of the gds is not merged (i.e. doesn't include standard cells)
p = sp.Popen(['cp', flowDir+'/results/calibre/'+designName+'.merged.gds.gz', \
extDir+'/layout/'+designName+'.gds.gz'])
p.wait()
# Clean the space
if os.path.isfile(extDir + '/run/svdb/' + designName + '.dv'):
os.remove(extDir + '/run/svdb/' + designName + '.dv')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.extf'):
os.remove(extDir + '/run/svdb/' + designName + '.extf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.lvsf'):
os.remove(extDir + '/run/svdb/' + designName + '.lvsf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.pdsp'):
os.remove(extDir + '/run/svdb/' + designName + '.pdsp')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.sp'):
os.remove(extDir + '/run/svdb/' + designName + '.sp')
# Calibre LVS
if lvs==1:
p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.lvs_',extDir+'/run/'])
p.wait()
with open(extDir+'/run/_calibre.lvs_', 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
with open(extDir+'/run/_calibre.lvs_', 'w') as file:
file.write(filedata)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdhB'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdhB',
ignore_errors=True)
p = sp.Popen(['calibre','-spice',designName+'.sp','-lvs','-hier','-nowait',
'_calibre.lvs_'],cwd=extDir+'/run')
p.wait()
print ('# PLL - LVS completed. check '+extDir+'/run/'+designName+'.lvs.report')
# Calibre RCX
if pex==1:
p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.rcx_',extDir+'/run/'])
p.wait()
p = sp.Popen(['cp',calibreRulesDir+'/calibre.rcx',extDir+'/run/'])
p.wait()
p = sp.Popen(['cp',calibreRulesDir+'/rules',extDir+'/run/'])
p.wait()
with open(extDir+'/run/_calibre.rcx_', 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
with open(extDir+'/run/_calibre.rcx_', 'w') as file:
file.write(filedata)
# Clean
if os.path.isdir(extDir + '/run/svdb/' + designName + '.phdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.phdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.xdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.xdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + 'template'):
shutil.rmtree(extDir + '/run/svdb/' + 'template',
ignore_errors=True)
p = sp.Popen(['calibre','-xrc','-phdb','-nowait','-turbo','1',
'_calibre.rcx_'],cwd=extDir+'/run')
p.wait()
p = sp.Popen(['calibre','-xrc','-pdb','-rcc','-turbo','1','-nowait',
'_calibre.rcx_'],cwd=extDir+'/run')
p.wait()
p = sp.Popen(['calibre','-xrc','-fmt','-all','-nowait','_calibre.rcx_'],
cwd=extDir+'/run')
p.wait()
print (designName+' post PEX netlist Generated')
#------------------------------------------------------------------------------
# generates dco.gds, .pex.netlist
#------------------------------------------------------------------------------
def dco_flow_pex(calibreRulesDir,netlistDir,formatDir,flowDir,rawPexDir,extDir,simDir,ndrv,ncc,nfc,nstg,ninterp,W_CC,H_CC,W_FC,H_FC,bleach,design,pex):
designName='dco_%ddrv_%dcc_%dfc_%dstg'%(ndrv,ncc,nfc,nstg)
print('starting flow for '+designName)
#-------------------------------------------
# flow setup
#-------------------------------------------
if bleach==1:
p = sp.Popen(['make','bleach_all'], cwd=flowDir)
p.wait()
Flow_setup.dco_flow_setup(formatDir,flowDir,ndrv,ncc,nfc,nstg)
NCtotal=nstg*(ncc+ndrv)
NFtotal=nstg*(nfc)
Atotal=NCtotal*W_CC*H_CC+NFtotal*W_FC*H_FC
W_core=math.ceil(math.sqrt(Atotal)*1.2)
H_core=W_core
with open(flowDir + '/scripts/innovus/always_source.tcl', 'r') as file:
filedata = file.read()
filedata = re.sub(r'set core_width.*', r'set core_width ' + \
str(W_core) + ' ;# Core Area Width', filedata)
filedata = re.sub(r'set core_height.*', r'set core_height ' + \
str(H_core) + ' ;# Core Area Height', filedata)
with open(flowDir + '/scripts/innovus/always_source.tcl', 'w') as file:
file.write(filedata)
#-------------------------------------------
# run CADRE flow
#-------------------------------------------
if design==1:
p = sp.Popen(['make','design'], cwd=flowDir)
p.wait()
p = sp.Popen(['make','lvs'], cwd=flowDir)
p.wait()
p = sp.Popen(['make','drc'], cwd=flowDir)
p.wait()
p = sp.Popen(['make','export'], cwd=flowDir)
p.wait()
#-------------------------------------------
# check if pex.netlist already exists
#-------------------------------------------
if pex==1:
try:
exist=open(netlistDir+designName+'.pex.netlist','r')
print(designName+'.pex.netlist already exists')
except:
#-------------------------------------------
# generate pex view
#-------------------------------------------
lvs=1 # do lvs for default
lvs_pex_65nm(calibreRulesDir,flowDir,extDir,simDir,designName,lvs,pex)
#-------------------------------------------
# modify the pex netlist
#-------------------------------------------
HSPICEpex_netlist.gen_pex_netlist(rawPexDir,netlistDir,formatDir,ncc,ndrv,nfc,nstg,ninterp,designName)
#-------------------------------------------
# copy .pxi, .pex
#-------------------------------------------
p = sp.Popen(['cp',extDir+'/run/'+designName+'.pex.netlist.'+designName+'.pxi',netlistDir+'/'+designName+'.pex.netlist.'+designName+'.pxi'])
p.wait()
p = sp.Popen(['cp',extDir+'/run/'+designName+'.pex.netlist.pex',netlistDir+'/'+designName+'.pex.netlist.pex'])
p.wait()
#------------------------------------------------------------------------------
# Run LVS and generate post PEX netlist
# flowDir should be absolute path
# This function is especially for designs using ff_dco as a Hard Macro
#------------------------------------------------------------------------------
def post_apr_HM(VDDnames,buf,bufName,dcoName,calibreRulesDir,flowDir,extDir,designName,lvs,pex):
#with open('./tempFiles/platform_config.json') as file:
# jsonConfig = json.load(file)
#
#calibreRulesDir = jsonConfig['calibreRules']
if lvs==1 or pex==1:
# Generate pre PEX netlist and gds files
p = sp.Popen(['cp', flowDir+'/results/innovus/'+designName+'_lvs.v', flowDir+'/results/innovus/'+designName+'_lvs_well.v'])
p.wait()
for VDDname in VDDnames:
p = sp.Popen(['vi', flowDir+'/results/innovus/'+designName+'_lvs_well.v', \
'-c', '%s/.VDD('+VDDname+')/.VDD('+VDDname+'), .VNW('+VDDname+'), .VPW(VSS)/g | wq'])
p.wait()
cdlInclude = ''
cdlParse = ''
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.cdlList', 'r') as file:
filedata = file.readlines()
for line in filedata:
cdlInclude = cdlInclude + ' -s ' + line.rstrip()
cdlParse = cdlParse + ' -lsr ' + line.rstrip()
#p = sp.Popen(['v2lvs', cdlParse, '-lsr', flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-lsr', flowDir+'/blocks/dco_FC/export/dco_FC.cdl', flowDir+'/blocks/ff_dco4/export/ff_dco4.cdl',
# cdlInclude, '-s',flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-s',flowDir+'/blocks/dco_FC/export/dco_FC.cdl','-s',flowDir+'/blocks/ff_dco4/export/ff_dco4.cdl', '-v',
# flowDir+'/results/innovus/'+designName+'_lvs_well.v',
# #flowDir+'/results/innovus/'+designName+'_lvs_well.v', '-v',flowDir+'/blocks/ff_dco/export/ff_dco.v',
# '-o',extDir+'/sch/'+designName+'.spi','-c','/_'])
# #'-o',extDir+'/sch/'+designName+'.spi','-i','-c','/_'])
if buf==0:
p = sp.Popen(['v2lvs', cdlParse, '-lsr', flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-lsr', flowDir+'/blocks/dco_FC/export/dco_FC.cdl', flowDir+'/blocks/'+dcoName+'/export/'+dcoName+'.cdl',
cdlInclude, '-s',flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-s',flowDir+'/blocks/dco_FC/export/dco_FC.cdl','-s',flowDir+'/blocks/'+dcoName+'/export/'+dcoName+'.cdl', '-v',
flowDir+'/results/innovus/'+designName+'_lvs_well.v',
'-o',extDir+'/sch/'+designName+'.spi','-c','/_'])
p.wait()
elif buf==1:
p = sp.Popen(['v2lvs', cdlParse, '-lsr', flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-lsr', flowDir+'/blocks/dco_FC/export/dco_FC.cdl',flowDir+'/blocks/'+bufName+'/export/'+bufName+'.cdl', flowDir+'/blocks/'+dcoName+'/export/'+dcoName+'.cdl',
cdlInclude, '-s',flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-s',flowDir+'/blocks/dco_FC/export/dco_FC.cdl','-s',flowDir+'/blocks/'+dcoName+'/export/'+dcoName+'.cdl','-s',flowDir+'/blocks/'+bufName+'/export/'+bufName+'.cdl', '-v',
flowDir+'/results/innovus/'+designName+'_lvs_well.v',
'-o',extDir+'/sch/'+designName+'.spi','-c','/_'])
p.wait()
# NOTE: The exported version of the gds is not merged (i.e. doesn't include standard cells)
p = sp.Popen(['cp', flowDir+'/results/calibre/'+designName+'.merged.gds.gz', \
extDir+'/layout/'+designName+'.gds.gz'])
p.wait()
# Clean the space
if os.path.isfile(extDir + '/run/svdb/' + designName + '.dv'):
os.remove(extDir + '/run/svdb/' + designName + '.dv')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.extf'):
os.remove(extDir + '/run/svdb/' + designName + '.extf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.lvsf'):
os.remove(extDir + '/run/svdb/' + designName + '.lvsf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.pdsp'):
os.remove(extDir + '/run/svdb/' + designName + '.pdsp')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.sp'):
os.remove(extDir + '/run/svdb/' + designName + '.sp')
# Calibre LVS
if lvs==1:
p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.lvs_',extDir+'/run/'])
p.wait()
with open(extDir+'/run/_calibre.lvs_', 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
with open(extDir+'/run/_calibre.lvs_', 'w') as file:
file.write(filedata)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdhB'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdhB',
ignore_errors=True)
p = sp.Popen(['calibre','-spice',designName+'.sp','-lvs','-hier','-nowait',
'_calibre.lvs_'],cwd=extDir+'/run')
p.wait()
print ('# PLL - LVS completed. check '+extDir+'/run/'+designName+'.lvs.report')
# Calibre RCX
if pex==1:
p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.rcx_',extDir+'/run/'])
p.wait()
p = sp.Popen(['cp',calibreRulesDir+'/calibre.rcx',extDir+'/run/'])
p.wait()
p = sp.Popen(['cp',calibreRulesDir+'/rules',extDir+'/run/'])
p.wait()
with open(extDir+'/run/_calibre.rcx_', 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
with open(extDir+'/run/_calibre.rcx_', 'w') as file:
file.write(filedata)
# Clean
if os.path.isdir(extDir + '/run/svdb/' + designName + '.phdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.phdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.xdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.xdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + 'template'):
shutil.rmtree(extDir + '/run/svdb/' + 'template',
ignore_errors=True)
p = sp.Popen(['calibre','-xrc','-phdb','-nowait','-turbo','1',
'_calibre.rcx_'],cwd=extDir+'/run')
p.wait()
p = sp.Popen(['calibre','-xrc','-pdb','-rcc','-turbo','1','-nowait',
'_calibre.rcx_'],cwd=extDir+'/run')
p.wait()
p = sp.Popen(['calibre','-xrc','-fmt','-all','-nowait','_calibre.rcx_'],
cwd=extDir+'/run')
p.wait()
print ('# PLL - Post PEX netlist Generated')
def post_apr(dcoAux,calibreRulesDir,flowDir,extDir,designName,lvs,pex):
#with open('./tempFiles/platform_config.json') as file:
# jsonConfig = json.load(file)
#
#calibreRulesDir = jsonConfig['calibreRules']
if lvs==1 or pex==1:
# Generate pre PEX netlist and gds files
p = sp.Popen(['cp', flowDir+'/results/innovus/'+designName+'_lvs.v', flowDir+'/results/innovus/'+designName+'_lvs_well.v'])
p.wait()
p = sp.Popen(['vi', flowDir+'/results/innovus/'+designName+'_lvs_well.v', \
'-c', '%s/.VDD(VDD)/.VDD(VDD), .VNW(VDD), .VPW(VSS)/g | wq'])
p.wait()
cdlInclude = ''
cdlParse = ''
with open(flowDir + '/scripts/innovus/generated/' + designName + \
'.cdlList', 'r') as file:
filedata = file.readlines()
for line in filedata:
cdlInclude = cdlInclude + ' -s ' + line.rstrip()
cdlParse = cdlParse + ' -lsr ' + line.rstrip()
if dcoAux==1:
p = sp.Popen(['v2lvs', cdlParse, '-lsr', flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-lsr', flowDir+'/blocks/dco_FC/export/dco_FC.cdl',
cdlInclude, '-s',flowDir+'/blocks/dco_CC/export/dco_CC.cdl','-s',flowDir+'/blocks/dco_FC/export/dco_FC.cdl','-v',
flowDir+'/results/innovus/'+designName+'_lvs_well.v',
'-o',extDir+'/sch/'+designName+'.spi','-i','-c','/_'])
p.wait()
else:
p = sp.Popen(['v2lvs', cdlParse,
cdlInclude,'-v',
flowDir+'/results/innovus/'+designName+'_lvs_well.v',
'-o',extDir+'/sch/'+designName+'.spi','-i','-c','/_'])
p.wait()
# NOTE: The exported version of the gds is not merged (i.e. doesn't include standard cells)
p = sp.Popen(['cp', flowDir+'/results/calibre/'+designName+'.merged.gds.gz', \
extDir+'/layout/'+designName+'.gds.gz'])
p.wait()
# Clean the space
if os.path.isfile(extDir + '/run/svdb/' + designName + '.dv'):
os.remove(extDir + '/run/svdb/' + designName + '.dv')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.extf'):
os.remove(extDir + '/run/svdb/' + designName + '.extf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.lvsf'):
os.remove(extDir + '/run/svdb/' + designName + '.lvsf')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.pdsp'):
os.remove(extDir + '/run/svdb/' + designName + '.pdsp')
if os.path.isfile(extDir + '/run/svdb/' + designName + '.sp'):
os.remove(extDir + '/run/svdb/' + designName + '.sp')
# Calibre LVS
if lvs==1:
p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.lvs_',extDir+'/run/'])
p.wait()
with open(extDir+'/run/_calibre.lvs_', 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
with open(extDir+'/run/_calibre.lvs_', 'w') as file:
file.write(filedata)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdhB'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdhB',
ignore_errors=True)
p = sp.Popen(['calibre','-spice',designName+'.sp','-lvs','-hier','-nowait',
'_calibre.lvs_'],cwd=extDir+'/run')
p.wait()
print ('# PLL - LVS completed. check '+extDir+'/run/'+designName+'.lvs.report')
# Calibre RCX
if pex==1:
p = sp.Popen(['cp',extDir+'/ruleFiles/_calibre.rcx_',extDir+'/run/'])
p.wait()
p = sp.Popen(['cp',calibreRulesDir+'/calibre.rcx',extDir+'/run/'])
p.wait()
p = sp.Popen(['cp',calibreRulesDir+'/rules',extDir+'/run/'])
p.wait()
with open(extDir+'/run/_calibre.rcx_', 'r') as file:
filedata = file.read()
filedata = filedata.replace('design', designName)
with open(extDir+'/run/_calibre.rcx_', 'w') as file:
file.write(filedata)
# Clean
if os.path.isdir(extDir + '/run/svdb/' + designName + '.phdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.phdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.xdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.xdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + designName + '.pdb'):
shutil.rmtree(extDir + '/run/svdb/' + designName + '.pdb',
ignore_errors=True)
if os.path.isdir(extDir + '/run/svdb/' + 'template'):
shutil.rmtree(extDir + '/run/svdb/' + 'template',
ignore_errors=True)
p = sp.Popen(['calibre','-xrc','-phdb','-nowait','-turbo','1',
'_calibre.rcx_'],cwd=extDir+'/run')
p.wait()
p = sp.Popen(['calibre','-xrc','-pdb','-rcc','-turbo','1','-nowait',
'_calibre.rcx_'],cwd=extDir+'/run')
p.wait()
p = sp.Popen(['calibre','-xrc','-fmt','-all','-nowait','_calibre.rcx_'],
cwd=extDir+'/run')
p.wait()
print ('# PLL - Post PEX netlist Generated')
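Each flow above repeats the same svdb cleanup block; a sketch of a shared helper that would collapse the repetition (hypothetical, not part of the original module; it reuses only the os and shutil calls already imported at the top of the file):

def clean_svdb(extDir, designName):
    # remove stale Calibre svdb artifacts before a fresh LVS/PEX run
    svdb = extDir + '/run/svdb/'
    for ext in ('.dv', '.extf', '.lvsf', '.pdsp', '.sp'):
        if os.path.isfile(svdb + designName + ext):
            os.remove(svdb + designName + ext)
    for sub in (designName + '.phdb', designName + '.xdb',
                designName + '.pdb', 'template'):
        if os.path.isdir(svdb + sub):
            shutil.rmtree(svdb + sub, ignore_errors=True)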
| 43.373646
| 245
| 0.58471
| 3,006
| 24,029
| 4.593812
| 0.083167
| 0.084727
| 0.07343
| 0.116591
| 0.84626
| 0.829821
| 0.821421
| 0.808169
| 0.782823
| 0.759432
| 0
| 0.00299
| 0.164884
| 24,029
| 553
| 246
| 43.45208
| 0.685155
| 0.169379
| 0
| 0.808184
| 0
| 0.007673
| 0.268617
| 0.058255
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012788
| false
| 0
| 0.01023
| 0
| 0.023018
| 0.02046
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5995f91f4e3ae2df40bfe9a5293882cc2a8a2f4a
| 440
|
py
|
Python
|
library/test.py
|
dglaude/led-shim
|
3facf2b17360fc2b17ac28ecc32e98080165d464
|
[
"MIT"
] | 29
|
2018-07-06T16:30:03.000Z
|
2022-02-25T19:59:06.000Z
|
library/test.py
|
dglaude/led-shim
|
3facf2b17360fc2b17ac28ecc32e98080165d464
|
[
"MIT"
] | 12
|
2018-05-17T16:35:12.000Z
|
2022-02-11T07:30:19.000Z
|
library/test.py
|
dglaude/led-shim
|
3facf2b17360fc2b17ac28ecc32e98080165d464
|
[
"MIT"
] | 11
|
2018-11-04T19:42:12.000Z
|
2022-01-26T23:46:58.000Z
|
import time

import ledshim

for x in range(ledshim.width):
    ledshim.set_pixel(x, 255, 0, 0)
    ledshim.show()
    time.sleep(0.05)

time.sleep(0.1)
ledshim.clear()
ledshim.show()

for x in range(ledshim.width):
    ledshim.set_pixel(x, 0, 255, 0)
    ledshim.show()
    time.sleep(0.05)

time.sleep(0.1)
ledshim.clear()
ledshim.show()

for x in range(ledshim.width):
    ledshim.set_pixel(x, 0, 0, 255)
    ledshim.show()
    time.sleep(0.05)
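The three sweeps differ only in color; an equivalent compact form using the same ledshim calls (a sketch; unlike the original, it also clears after the final blue sweep):

for color in ((255, 0, 0), (0, 255, 0), (0, 0, 255)):
    for x in range(ledshim.width):
        ledshim.set_pixel(x, *color)
        ledshim.show()
        time.sleep(0.05)
    time.sleep(0.1)
    ledshim.clear()
    ledshim.show()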
| 15.714286
| 32
| 0.670455
| 77
| 440
| 3.792208
| 0.220779
| 0.188356
| 0.171233
| 0.113014
| 0.883562
| 0.883562
| 0.804795
| 0.804795
| 0.804795
| 0.804795
| 0
| 0.076503
| 0.168182
| 440
| 27
| 33
| 16.296296
| 0.721311
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6162420fadc223e5faacb16aeb864259b14c30c
| 14,095
|
py
|
Python
|
installers/charm/ro/tests/test_pod_spec.py
|
ayoubbargueoui1996/osm-devops
|
b5973c2a4477354bb17a56fe39559f277a3a994a
|
[
"Apache-2.0"
] | null | null | null |
installers/charm/ro/tests/test_pod_spec.py
|
ayoubbargueoui1996/osm-devops
|
b5973c2a4477354bb17a56fe39559f277a3a994a
|
[
"Apache-2.0"
] | null | null | null |
installers/charm/ro/tests/test_pod_spec.py
|
ayoubbargueoui1996/osm-devops
|
b5973c2a4477354bb17a56fe39559f277a3a994a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright 2020 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# For those usages not covered by the Apache License, Version 2.0 please
# contact: legal@canonical.com
#
# To get in touch with the maintainers, please contact:
# osm-charmers@lists.launchpad.net
##
from typing import NoReturn
import unittest
import pod_spec
class TestPodSpec(unittest.TestCase):
"""Pod spec unit tests."""
def test_make_pod_ports(self) -> NoReturn:
"""Testing make pod ports."""
port = 9090
expected_result = [
{
"name": "ro",
"containerPort": port,
"protocol": "TCP",
}
]
pod_ports = pod_spec._make_pod_ports(port)
self.assertListEqual(expected_result, pod_ports)
def test_make_pod_envconfig_ng_ro(self) -> NoReturn:
"""Teting make pod envconfig."""
config = {
"enable_ng_ro": True,
"database_commonkey": "osm",
"log_level": "INFO",
}
relation_state = {
"kafka_host": "kafka",
"kafka_port": "9090",
"mongodb_connection_string": "mongodb://mongo",
}
expected_result = {
"OSMRO_LOG_LEVEL": config["log_level"],
"OSMRO_MESSAGE_DRIVER": "kafka",
"OSMRO_MESSAGE_HOST": relation_state["kafka_host"],
"OSMRO_MESSAGE_PORT": relation_state["kafka_port"],
"OSMRO_DATABASE_DRIVER": "mongo",
"OSMRO_DATABASE_URI": relation_state["mongodb_connection_string"],
"OSMRO_DATABASE_COMMONKEY": config["database_commonkey"],
}
pod_envconfig = pod_spec._make_pod_envconfig(config, relation_state)
self.assertDictEqual(expected_result, pod_envconfig)
def test_make_pod_envconfig_no_ng_ro(self) -> NoReturn:
"""Teting make pod envconfig."""
config = {
"log_level": "INFO",
"enable_ng_ro": False,
"vim_database": "mano_vim_db",
"ro_database": "mano_db",
"openmano_tenant": "osm",
}
relation_state = {
"mysql_host": "mysql",
"mysql_port": 3306,
"mysql_user": "mano",
"mysql_password": "manopw",
"mysql_root_password": "rootmanopw",
}
expected_result = {
"OSMRO_LOG_LEVEL": config["log_level"],
"RO_DB_HOST": relation_state["mysql_host"],
"RO_DB_OVIM_HOST": relation_state["mysql_host"],
"RO_DB_PORT": relation_state["mysql_port"],
"RO_DB_OVIM_PORT": relation_state["mysql_port"],
"RO_DB_USER": relation_state["mysql_user"],
"RO_DB_OVIM_USER": relation_state["mysql_user"],
"RO_DB_PASSWORD": relation_state["mysql_password"],
"RO_DB_OVIM_PASSWORD": relation_state["mysql_password"],
"RO_DB_ROOT_PASSWORD": relation_state["mysql_root_password"],
"RO_DB_OVIM_ROOT_PASSWORD": relation_state["mysql_root_password"],
"RO_DB_NAME": config["ro_database"],
"RO_DB_OVIM_NAME": config["vim_database"],
"OPENMANO_TENANT": config["openmano_tenant"],
}
pod_envconfig = pod_spec._make_pod_envconfig(config, relation_state)
self.assertDictEqual(expected_result, pod_envconfig)
def test_make_startup_probe(self) -> NoReturn:
"""Testing make startup probe."""
expected_result = {
"exec": {"command": ["/usr/bin/pgrep", "python3"]},
"initialDelaySeconds": 60,
"timeoutSeconds": 5,
}
startup_probe = pod_spec._make_startup_probe()
self.assertDictEqual(expected_result, startup_probe)
def test_make_readiness_probe(self) -> NoReturn:
"""Testing make readiness probe."""
port = 9090
expected_result = {
"httpGet": {
"path": "/openmano/tenants",
"port": port,
},
"periodSeconds": 10,
"timeoutSeconds": 5,
"successThreshold": 1,
"failureThreshold": 3,
}
readiness_probe = pod_spec._make_readiness_probe(port)
self.assertDictEqual(expected_result, readiness_probe)
def test_make_liveness_probe(self) -> NoReturn:
"""Testing make liveness probe."""
port = 9090
expected_result = {
"httpGet": {
"path": "/openmano/tenants",
"port": port,
},
"initialDelaySeconds": 600,
"periodSeconds": 10,
"timeoutSeconds": 5,
"successThreshold": 1,
"failureThreshold": 3,
}
liveness_probe = pod_spec._make_liveness_probe(port)
self.assertDictEqual(expected_result, liveness_probe)
def test_make_pod_spec_ng_ro(self) -> NoReturn:
"""Testing make pod spec."""
image_info = {"upstream-source": "opensourcemano/ro:8"}
config = {
"database_commonkey": "osm",
"log_level": "INFO",
"enable_ng_ro": True,
}
relation_state = {
"kafka_host": "kafka",
"kafka_port": "9090",
"mongodb_connection_string": "mongodb://mongo",
}
app_name = "ro"
port = 9090
expected_result = {
"version": 3,
"containers": [
{
"name": app_name,
"imageDetails": image_info,
"imagePullPolicy": "Always",
"ports": [
{
"name": app_name,
"containerPort": port,
"protocol": "TCP",
}
],
"envConfig": {
"OSMRO_LOG_LEVEL": config["log_level"],
"OSMRO_MESSAGE_DRIVER": "kafka",
"OSMRO_MESSAGE_HOST": relation_state["kafka_host"],
"OSMRO_MESSAGE_PORT": relation_state["kafka_port"],
"OSMRO_DATABASE_DRIVER": "mongo",
"OSMRO_DATABASE_URI": relation_state[
"mongodb_connection_string"
],
"OSMRO_DATABASE_COMMONKEY": config["database_commonkey"],
},
"kubernetes": {
"startupProbe": {
"exec": {"command": ["/usr/bin/pgrep", "python3"]},
"initialDelaySeconds": 60,
"timeoutSeconds": 5,
},
"readinessProbe": {
"httpGet": {
"path": "/openmano/tenants",
"port": port,
},
"periodSeconds": 10,
"timeoutSeconds": 5,
"successThreshold": 1,
"failureThreshold": 3,
},
"livenessProbe": {
"httpGet": {
"path": "/openmano/tenants",
"port": port,
},
"initialDelaySeconds": 600,
"periodSeconds": 10,
"timeoutSeconds": 5,
"successThreshold": 1,
"failureThreshold": 3,
},
},
}
],
"kubernetesResources": {"ingressResources": []},
}
spec = pod_spec.make_pod_spec(
image_info, config, relation_state, app_name, port
)
self.assertDictEqual(expected_result, spec)
def test_make_pod_spec_no_ng_ro(self) -> NoReturn:
"""Testing make pod spec."""
image_info = {"upstream-source": "opensourcemano/ro:8"}
config = {
"log_level": "INFO",
"enable_ng_ro": False,
"vim_database": "mano_vim_db",
"ro_database": "mano_db",
"openmano_tenant": "osm",
}
relation_state = {
"mysql_host": "mysql",
"mysql_port": 3306,
"mysql_user": "mano",
"mysql_password": "manopw",
"mysql_root_password": "rootmanopw",
}
app_name = "ro"
port = 9090
expected_result = {
"version": 3,
"containers": [
{
"name": app_name,
"imageDetails": image_info,
"imagePullPolicy": "Always",
"ports": [
{
"name": app_name,
"containerPort": port,
"protocol": "TCP",
}
],
"envConfig": {
"OSMRO_LOG_LEVEL": config["log_level"],
"RO_DB_HOST": relation_state["mysql_host"],
"RO_DB_OVIM_HOST": relation_state["mysql_host"],
"RO_DB_PORT": relation_state["mysql_port"],
"RO_DB_OVIM_PORT": relation_state["mysql_port"],
"RO_DB_USER": relation_state["mysql_user"],
"RO_DB_OVIM_USER": relation_state["mysql_user"],
"RO_DB_PASSWORD": relation_state["mysql_password"],
"RO_DB_OVIM_PASSWORD": relation_state["mysql_password"],
"RO_DB_ROOT_PASSWORD": relation_state["mysql_root_password"],
"RO_DB_OVIM_ROOT_PASSWORD": relation_state[
"mysql_root_password"
],
"RO_DB_NAME": config["ro_database"],
"RO_DB_OVIM_NAME": config["vim_database"],
"OPENMANO_TENANT": config["openmano_tenant"],
},
"kubernetes": {
"startupProbe": {
"exec": {"command": ["/usr/bin/pgrep", "python3"]},
"initialDelaySeconds": 60,
"timeoutSeconds": 5,
},
"readinessProbe": {
"httpGet": {
"path": "/openmano/tenants",
"port": port,
},
"periodSeconds": 10,
"timeoutSeconds": 5,
"successThreshold": 1,
"failureThreshold": 3,
},
"livenessProbe": {
"httpGet": {
"path": "/openmano/tenants",
"port": port,
},
"initialDelaySeconds": 600,
"periodSeconds": 10,
"timeoutSeconds": 5,
"successThreshold": 1,
"failureThreshold": 3,
},
},
}
],
"kubernetesResources": {"ingressResources": []},
}
spec = pod_spec.make_pod_spec(
image_info, config, relation_state, app_name, port
)
self.assertDictEqual(expected_result, spec)
def test_make_pod_spec_without_image_info(self) -> NoReturn:
"""Testing make pod spec without image_info."""
image_info = None
config = {
"enable_ng_ro": True,
"database_commonkey": "osm",
"log_level": "INFO",
}
relation_state = {
"kafka_host": "kafka",
"kafka_port": 9090,
"mongodb_connection_string": "mongodb://mongo",
}
app_name = "ro"
port = 9090
spec = pod_spec.make_pod_spec(
image_info, config, relation_state, app_name, port
)
self.assertIsNone(spec)
def test_make_pod_spec_without_config(self) -> NoReturn:
"""Testing make pod spec without config."""
image_info = {"upstream-source": "opensourcemano/ro:8"}
config = {}
relation_state = {
"kafka_host": "kafka",
"kafka_port": 9090,
"mongodb_connection_string": "mongodb://mongo",
}
app_name = "ro"
port = 9090
with self.assertRaises(ValueError):
pod_spec.make_pod_spec(image_info, config, relation_state, app_name, port)
def test_make_pod_spec_without_relation_state(self) -> NoReturn:
"""Testing make pod spec without relation_state."""
image_info = {"upstream-source": "opensourcemano/ro:8"}
config = {
"enable_ng_ro": True,
"database_commonkey": "osm",
"log_level": "INFO",
}
relation_state = {}
app_name = "ro"
port = 9090
with self.assertRaises(ValueError):
pod_spec.make_pod_spec(image_info, config, relation_state, app_name, port)
if __name__ == "__main__":
unittest.main()
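With pod_spec.py importable from the test directory, this suite also runs through unittest discovery; a typical invocation (assumed, not documented in the file):

    python3 -m unittest test_pod_spec -v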
| 36.141026
| 86
| 0.486485
| 1,191
| 14,095
| 5.422334
| 0.161209
| 0.084546
| 0.061319
| 0.032053
| 0.826107
| 0.79839
| 0.760452
| 0.73769
| 0.720037
| 0.708269
| 0
| 0.014783
| 0.404895
| 14,095
| 389
| 87
| 36.233933
| 0.755126
| 0.079603
| 0
| 0.724359
| 0
| 0
| 0.258165
| 0.022341
| 0
| 0
| 0
| 0
| 0.035256
| 1
| 0.035256
| false
| 0.041667
| 0.009615
| 0
| 0.048077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0509e4f9f7725b0d3f91e494d40540abca8dd769
| 66,916
|
py
|
Python
|
code/python/PAEngine/v3/fds/sdk/PAEngine/api/templated_pa_components_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/PAEngine/v3/fds/sdk/PAEngine/api/templated_pa_components_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/PAEngine/v3/fds/sdk/PAEngine/api/templated_pa_components_api.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
"""
PA Engine API
Allow clients to fetch Analytics through APIs. # noqa: E501
The version of the OpenAPI document: 3
Contact: analytics.api.support@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from multiprocessing.pool import ApplyResult
import typing
from fds.sdk.PAEngine.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.PAEngine.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.PAEngine.exceptions import ApiException
from fds.sdk.PAEngine.model.client_error_response import ClientErrorResponse
from fds.sdk.PAEngine.model.templated_pa_component_parameters_root import TemplatedPAComponentParametersRoot
from fds.sdk.PAEngine.model.templated_pa_component_post_summary_root import TemplatedPAComponentPostSummaryRoot
from fds.sdk.PAEngine.model.templated_pa_component_root import TemplatedPAComponentRoot
from fds.sdk.PAEngine.model.templated_pa_component_summary_root import TemplatedPAComponentSummaryRoot
from fds.sdk.PAEngine.model.templated_pa_component_update_parameters_root import TemplatedPAComponentUpdateParametersRoot
class TemplatedPAComponentsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.create_templated_pa_components_endpoint = _Endpoint(
settings={
'response_type': (
{ 201: (TemplatedPAComponentPostSummaryRoot,), 400: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/pa/v3/templated-components',
'operation_id': 'create_templated_pa_components',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'templated_pa_component_parameters_root',
],
'required': [
'templated_pa_component_parameters_root',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'templated_pa_component_parameters_root':
(TemplatedPAComponentParametersRoot,),
},
'attribute_map': {
},
'location_map': {
'templated_pa_component_parameters_root': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'text/plain',
'application/json',
'text/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
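# Shape of the _Endpoint descriptors built in this constructor: 'settings'
# maps HTTP status codes to response models and carries auth, path and
# method, 'params_map' lists the known and required parameters, 'root_map'
# gives each parameter's OpenAPI type and location (body, path or query),
# and 'headers_map' pins the accepted and sent content types. The remaining
# endpoints below follow the same pattern.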
self.delete_templated_pa_components_endpoint = _Endpoint(
settings={
'response_type': None,
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/pa/v3/templated-components/{id}',
'operation_id': 'delete_templated_pa_components',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'text/plain',
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_templated_pa_component_by_id_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (TemplatedPAComponentRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/pa/v3/templated-components/{id}',
'operation_id': 'get_templated_pa_component_by_id',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_templated_pa_components_in_path_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (TemplatedPAComponentSummaryRoot,), 400: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/pa/v3/templated-components',
'operation_id': 'get_templated_pa_components_in_path',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'directory',
],
'required': [
'directory',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'directory':
(str,),
},
'attribute_map': {
'directory': 'directory',
},
'location_map': {
'directory': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.update_templated_pa_components_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (TemplatedPAComponentPostSummaryRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/pa/v3/templated-components/{id}',
'operation_id': 'update_templated_pa_components',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'id',
'templated_pa_component_update_parameters_root',
],
'required': [
'id',
'templated_pa_component_update_parameters_root',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
'templated_pa_component_update_parameters_root':
(TemplatedPAComponentUpdateParametersRoot,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
'templated_pa_component_update_parameters_root': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
@staticmethod
def apply_kwargs_defaults(kwargs, return_http_data_only, async_req):
kwargs["async_req"] = async_req
kwargs["_return_http_data_only"] = return_http_data_only
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
def create_templated_pa_components(
self,
templated_pa_component_parameters_root,
**kwargs
) -> TemplatedPAComponentPostSummaryRoot:
"""Create templated PA component # noqa: E501
This endpoint creates a new component based on a linked or unlinked PA template. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
templated_pa_component_parameters_root (TemplatedPAComponentParametersRoot): Request Parameters
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentPostSummaryRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['templated_pa_component_parameters_root'] = \
templated_pa_component_parameters_root
return self.create_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def create_templated_pa_components_with_http_info(
self,
templated_pa_component_parameters_root,
**kwargs
) -> typing.Tuple[TemplatedPAComponentPostSummaryRoot, int, typing.MutableMapping]:
"""Create templated PA component # noqa: E501
This endpoint creates a new component based on a linked or unlinked PA template. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
templated_pa_component_parameters_root (TemplatedPAComponentParametersRoot): Request Parameters
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentPostSummaryRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['templated_pa_component_parameters_root'] = \
templated_pa_component_parameters_root
return self.create_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def create_templated_pa_components_async(
self,
templated_pa_component_parameters_root,
**kwargs
) -> "ApplyResult[TemplatedPAComponentPostSummaryRoot]":
"""Create templated PA component # noqa: E501
This endpoint creates a new component based on a linked or unlinked PA template. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
templated_pa_component_parameters_root (TemplatedPAComponentParametersRoot): Request Parameters
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[TemplatedPAComponentPostSummaryRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['templated_pa_component_parameters_root'] = \
templated_pa_component_parameters_root
return self.create_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def create_templated_pa_components_with_http_info_async(
self,
templated_pa_component_parameters_root,
**kwargs
) -> "ApplyResult[typing.Tuple[TemplatedPAComponentPostSummaryRoot, int, typing.MutableMapping]]":
"""Create templated PA component # noqa: E501
This endpoint creates a new component based on a linked or unlinked PA template. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
templated_pa_component_parameters_root (TemplatedPAComponentParametersRoot): Request Parameters
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(TemplatedPAComponentPostSummaryRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['templated_pa_component_parameters_root'] = \
templated_pa_component_parameters_root
return self.create_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def delete_templated_pa_components(
self,
id="01234567890123456789012345678901",
**kwargs
) -> None:
"""Delete templated PA component # noqa: E501
This endpoint deletes an existing templated PA component. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
None
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.delete_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def delete_templated_pa_components_with_http_info(
self,
id="01234567890123456789012345678901",
**kwargs
) -> typing.Tuple[None, int, typing.MutableMapping]:
"""Delete templated PA component # noqa: E501
This endpoint deletes an existing templated PA component. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
None
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.delete_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def delete_templated_pa_components_async(
self,
id="01234567890123456789012345678901",
**kwargs
) -> "ApplyResult[None]":
"""Delete templated PA component # noqa: E501
This endpoint deletes an existing templated PA component. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[None]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.delete_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def delete_templated_pa_components_with_http_info_async(
self,
id="01234567890123456789012345678901",
**kwargs
) -> "ApplyResult[typing.Tuple[None, int, typing.MutableMapping]]":
"""Delete templated PA component # noqa: E501
This endpoint deletes an existing templated PA component. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(None, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.delete_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_component_by_id(
self,
id="01234567890123456789012345678901",
**kwargs
) -> TemplatedPAComponentRoot:
"""Get templated PA component by id # noqa: E501
This endpoint fetches the templated PA component settings. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.get_templated_pa_component_by_id_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_component_by_id_with_http_info(
self,
id="01234567890123456789012345678901",
**kwargs
) -> typing.Tuple[TemplatedPAComponentRoot, int, typing.MutableMapping]:
"""Get templated PA component by id # noqa: E501
This endpoint fetches the templated PA component settings. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.get_templated_pa_component_by_id_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_component_by_id_async(
self,
id="01234567890123456789012345678901",
**kwargs
) -> "ApplyResult[TemplatedPAComponentRoot]":
"""Get templated PA component by id # noqa: E501
This endpoint fetches the templated PA component settings. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[TemplatedPAComponentRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.get_templated_pa_component_by_id_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_component_by_id_with_http_info_async(
self,
id="01234567890123456789012345678901",
**kwargs
) -> "ApplyResult[typing.Tuple[TemplatedPAComponentRoot, int, typing.MutableMapping]]":
"""Get templated PA component by id # noqa: E501
This endpoint fetches the templated PA component settings. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(TemplatedPAComponentRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.get_templated_pa_component_by_id_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_components_in_path(
self,
directory="Personal:TemplatedPAComponents/",
**kwargs
) -> TemplatedPAComponentSummaryRoot:
"""Get templated PA components in path # noqa: E501
This endpoint returns the list of templated PA components in path. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
directory (str): Get templated PA components in path. defaults to "Personal:TemplatedPAComponents/", must be one of ["Personal:TemplatedPAComponents/"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentSummaryRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['directory'] = \
directory
return self.get_templated_pa_components_in_path_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_components_in_path_with_http_info(
self,
directory="Personal:TemplatedPAComponents/",
**kwargs
) -> typing.Tuple[TemplatedPAComponentSummaryRoot, int, typing.MutableMapping]:
"""Get templated PA components in path # noqa: E501
This endpoint returns the list of templated PA components in path. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
directory (str): Get templated PA components in path. defaults to "Personal:TemplatedPAComponents/", must be one of ["Personal:TemplatedPAComponents/"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentSummaryRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['directory'] = \
directory
return self.get_templated_pa_components_in_path_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_components_in_path_async(
self,
directory="Personal:TemplatedPAComponents/",
**kwargs
) -> "ApplyResult[TemplatedPAComponentSummaryRoot]":
"""Get templated PA components in path # noqa: E501
This endpoint returns the list of templated PA components in path. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
directory (str): Get templated PA components in path. defaults to "Personal:TemplatedPAComponents/", must be one of ["Personal:TemplatedPAComponents/"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[TemplatedPAComponentSummaryRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['directory'] = \
directory
return self.get_templated_pa_components_in_path_endpoint.call_with_http_info(**kwargs)
def get_templated_pa_components_in_path_with_http_info_async(
self,
directory="Personal:TemplatedPAComponents/",
**kwargs
) -> "ApplyResult[typing.Tuple[TemplatedPAComponentSummaryRoot, int, typing.MutableMapping]]":
"""Get templated PA components in path # noqa: E501
This endpoint returns the list of templated PA components in path. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
directory (str): Get templated PA components in path. defaults to "Personal:TemplatedPAComponents/", must be one of ["Personal:TemplatedPAComponents/"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(TemplatedPAComponentSummaryRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['directory'] = \
directory
return self.get_templated_pa_components_in_path_endpoint.call_with_http_info(**kwargs)
def update_templated_pa_components(
self,
templated_pa_component_update_parameters_root,
id="01234567890123456789012345678901",
**kwargs
) -> TemplatedPAComponentPostSummaryRoot:
"""Update templated PA component # noqa: E501
This endpoint allows the user to update the request body of an existing templated PA component. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
templated_pa_component_update_parameters_root (TemplatedPAComponentUpdateParametersRoot): Request Parameters
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentPostSummaryRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
kwargs['templated_pa_component_update_parameters_root'] = \
templated_pa_component_update_parameters_root
return self.update_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def update_templated_pa_components_with_http_info(
self,
templated_pa_component_update_parameters_root,
id="01234567890123456789012345678901",
**kwargs
) -> typing.Tuple[TemplatedPAComponentPostSummaryRoot, int, typing.MutableMapping]:
"""Update templated PA component # noqa: E501
This endpoint allows the user to update the request body of an existing templated PA component. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
templated_pa_component_update_parameters_root (TemplatedPAComponentUpdateParametersRoot): Request Parameters
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
TemplatedPAComponentPostSummaryRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
kwargs['templated_pa_component_update_parameters_root'] = \
templated_pa_component_update_parameters_root
return self.update_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def update_templated_pa_components_async(
self,
templated_pa_component_update_parameters_root,
id="01234567890123456789012345678901",
**kwargs
) -> "ApplyResult[TemplatedPAComponentPostSummaryRoot]":
"""Update templated PA component # noqa: E501
This endpoint allows the user to update the request body of an existing templated PA component. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
templated_pa_component_update_parameters_root (TemplatedPAComponentUpdateParametersRoot): Request Parameters
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[TemplatedPAComponentPostSummaryRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
kwargs['templated_pa_component_update_parameters_root'] = \
templated_pa_component_update_parameters_root
return self.update_templated_pa_components_endpoint.call_with_http_info(**kwargs)
def update_templated_pa_components_with_http_info_async(
self,
templated_pa_component_update_parameters_root,
id="01234567890123456789012345678901",
**kwargs
) -> "ApplyResult[typing.Tuple[TemplatedPAComponentPostSummaryRoot, int, typing.MutableMapping]]":
"""Update templated PA component # noqa: E501
This endpoint allows the user to update the request body of an existing templated PA component. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the PA template. * Multi-horizon frequencies are not supported through this endpoint. * Componentdetail supports securities, groups, and totals; if nothing is passed, it defaults to securities. * If the grouping is overridden with a frequency, both the grouping saved to the original component and the default Beginning of Period frequency are overridden by whatever is passed in the request body. * If the grouping frequency is overridden without overriding the group id, it is not applied to the default groupings saved to the original component. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
templated_pa_component_update_parameters_root (TemplatedPAComponentUpdateParametersRoot): Request Parameters
id (str): Unique identifier for a templated PA component. defaults to "01234567890123456789012345678901", must be one of ["01234567890123456789012345678901"]
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(TemplatedPAComponentPostSummaryRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
kwargs['templated_pa_component_update_parameters_root'] = \
templated_pa_component_update_parameters_root
return self.update_templated_pa_components_endpoint.call_with_http_info(**kwargs)
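# Example usage (illustrative sketch, not part of the generated client; the
# identifiers below are the documented placeholder defaults): synchronous
# methods return the parsed model directly, while the *_async variants
# return a multiprocessing ApplyResult that must be .get()'d.
#
#   api = TemplatedPAComponentsApi()
#   summary = api.get_templated_pa_components_in_path(
#       directory="Personal:TemplatedPAComponents/"
#   )
#   pending = api.get_templated_pa_component_by_id_async(
#       id="01234567890123456789012345678901"
#   )
#   component = pending.get()  # blocks until the async request finishes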
| 51.752514
| 853
| 0.617266
| 7,495
| 66,916
| 5.359973
| 0.036825
| 0.040251
| 0.047295
| 0.018918
| 0.957733
| 0.947253
| 0.937172
| 0.928136
| 0.915366
| 0.908371
| 0
| 0.029834
| 0.326275
| 66,916
| 1,292
| 854
| 51.79257
| 0.861256
| 0.611842
| 0
| 0.692008
| 1
| 0
| 0.188159
| 0.111262
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042885
| false
| 0
| 0.025341
| 0
| 0.109162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0554e1e3a2a5e4cb2c73423f4dd2fd5b1c26f05e
| 13,245
|
py
|
Python
|
game/x (1).py
|
gautamig54/battleship
|
08abc537eca83b3c1e2688a7cca1a1f5b9019fdd
|
[
"MIT"
] | 2
|
2018-02-28T18:01:47.000Z
|
2018-02-28T18:05:09.000Z
|
game/x (1).py
|
gautamig54/battleship
|
08abc537eca83b3c1e2688a7cca1a1f5b9019fdd
|
[
"MIT"
] | null | null | null |
game/x (1).py
|
gautamig54/battleship
|
08abc537eca83b3c1e2688a7cca1a1f5b9019fdd
|
[
"MIT"
] | null | null | null |
# Imports first: display_invalid() uses Tkinter/tkMessageBox (Python 2
# module names), so they must be imported before the function can run.
import pygame, sys
import Tkinter as tk
import tkMessageBox
from checks import *
from ships import *
def display_invalid():
    root = tk.Tk()
    root.withdraw()
    tkMessageBox.showinfo("Message", "Sorry, Invalid block")
black = (0,0,0)
white = (255,255,255)
blue = (0,0,255)
green = (0,255,0)
red = (255,0,0)
pygame.init()
screen = pygame.display.set_mode((1300,700))
pygame.display.set_caption("Battleship")
clock = pygame.time.Clock()
pygame.draw.rect(screen,white,(0,0,650,700))
pygame.draw.rect(screen,black,(650,0,650,700))
rect1= []
rectplayer1 = []
rectplayer2 = []
rect = [rectplayer1,rectplayer2]
count = 0
count1 = 3
ctr = 0
flag = 0
flag1 = 0
flag2 = 0
flag3 = 0
flag4 = 0
check = 0
for i in range(10):
for j in range(10):
pygame.draw.rect(screen,black,((j+2)*50,(i+2)*50,50,50),2)
for i in range(14,24):
for j in range(10):
pygame.draw.rect(screen,white,((i)*50,(j+2)*50,50,50),2)
myfont = pygame.font.SysFont("monospace",30)
text1 = myfont.render("Player 1",1, black)
text2 = myfont.render("Player 2",1, white)
screen.blit(text1, (250,50))
screen.blit(text2, (900,50))
pygame.display.update()
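# Board geometry (derived from the drawing loops above): every cell is
# 50x50 px. Player 1's grid spans grid columns 2-11 and Player 2's grid
# columns 14-23, both over rows 2-11. Mouse clicks are mapped to grid
# coordinates with Python 2 integer division, e.g. a click at pixel
# (137, 212) lands in cell (137/50, 212/50) == (2, 4).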
#Input Loop - start
while ctr < 2:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
print (event)
if ctr == 0 and flag == 0:
pygame.draw.circle(screen, black, (230,60),10)
pygame.display.update()
if ctr == 1 and flag == 0:
pygame.draw.circle(screen, white, (870,60),10)
pygame.display.update()
if flag == 0:
flag = 1
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player " + str(ctr+1) + ", place your destroyer, 2 adjacent blocks")
if count == 2 and flag == 1:
count+=1
flag = 2
if flag == 1 and event.type == pygame.MOUSEBUTTONDOWN :
flag2 = 0
[x,y] = pygame.mouse.get_pos()
x = (x/50)
y = (y/50)
if check_if_in_box(ctr,x,y) == 1:
flag2 = 1
display_invalid()
if check_if_ship_present(ctr,x,y,rect,rect1) == 1:
flag2 = 1
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Sorry, there already is a ship on this block")
if count<2 and flag2 == 0:
if count == 1:
flag1 = 0
if check_if_valid(x,y,rect1):
flag1 = 0
check = check_orientation(rect1[0],[x,y])
else:
display_invalid()
flag1 = 1
if flag1 == 0 or count == 0:
rect1.append([x,y])
pygame.draw.rect(screen,blue,(x*50,y*50,50,50))
pygame.display.update()
count += 1
if count == 3 and flag == 2:
rect[ctr].append(rect1)
rect1 = []
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player " + str(ctr+1) + ", place your submarine, 3 adjacent blocks")
count = 0
flag = 3
if count == 3 and flag == 3:
count+=1
flag = 4
if event.type == pygame.MOUSEBUTTONDOWN and flag == 3:
flag2 = 0
[x,y] = pygame.mouse.get_pos()
x = (x/50)
y = (y/50)
if check_if_in_box(ctr,x,y) == 1:
flag2 = 1
display_invalid()
if check_if_ship_present(ctr,x,y,rect,rect1) == 1:
flag2 = 1
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Sorry, there already is a ship on this block")
if count<3 and flag2 == 0:
if count == 1:
flag1 = 0
if check_if_valid(x,y,rect1):
flag1 = 0
check = check_orientation(rect1[0],[x,y])
print check
else:
display_invalid()
flag1 = 1
if count>1:
print check
if check == 0:
print check_if_horizontal(x,y,rect1,count)
if check_if_horizontal(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
else:
if check_if_vertical(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
if flag1 == 0 or count == 0:
rect1.append([x,y])
pygame.draw.rect(screen,blue,(x*50,y*50,50,50))
pygame.display.update()
count += 1
if count == 4 and flag == 4:
rect[ctr].append(rect1)
rect1 = []
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player " + str(ctr+1) + ", place your cruiser, 3 adjacent blocks")
count = 0
flag = 5
if count == 3 and flag == 5:
count+=1
flag = 6
if event.type == pygame.MOUSEBUTTONDOWN and flag == 5:
flag2 = 0
[x,y] = pygame.mouse.get_pos()
x = (x/50)
y = (y/50)
if check_if_in_box(ctr,x,y) == 1:
flag2 = 1
display_invalid()
if check_if_ship_present(ctr,x,y,rect,rect1) == 1:
flag2 = 1
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Sorry, there already is a ship on this block")
if count<3 and flag2 == 0:
if count == 1:
flag1 = 0
if check_if_valid(x,y,rect1):
flag1 = 0
                        check = check_orientation(rect1[0],[x,y])
                        print(check)  # debug: 0 means horizontal, else vertical
else:
display_invalid()
flag1 = 1
                if count>1:
                    print(check)
                    if check == 0:
                        print(check_if_horizontal(x,y,rect1,count))
if check_if_horizontal(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
else:
if check_if_vertical(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
if flag1 == 0 or count == 0:
rect1.append([x,y])
pygame.draw.rect(screen,blue,(x*50,y*50,50,50))
pygame.display.update()
count += 1
if count == 4 and flag == 6:
rect[ctr].append(rect1)
rect1 = []
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player " + str(ctr+1) + ", place your battleship, 4 adjacent blocks")
count = 0
flag = 7
if count == 4 and flag == 7:
count+=1
flag = 8
if event.type == pygame.MOUSEBUTTONDOWN and flag == 7:
flag2 = 0
[x,y] = pygame.mouse.get_pos()
x = (x/50)
y = (y/50)
if check_if_in_box(ctr,x,y) == 1:
flag2 = 1
display_invalid()
if check_if_ship_present(ctr,x,y,rect,rect1) == 1:
flag2 = 1
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Sorry, there already is a ship on this block")
if count<4 and flag2 == 0:
if count == 1:
flag1 = 0
if check_if_valid(x,y,rect1):
flag1 = 0
                        check = check_orientation(rect1[0],[x,y])
                        print(check)  # debug: 0 means horizontal, else vertical
else:
display_invalid()
flag1 = 1
                if count>1:
                    print(check)
                    if check == 0:
                        print(check_if_horizontal(x,y,rect1,count))
if check_if_horizontal(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
else:
if check_if_vertical(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
if flag1 == 0 or count == 0:
rect1.append([x,y])
pygame.draw.rect(screen,blue,(x*50,y*50,50,50))
pygame.display.update()
count += 1
if count == 5 and flag == 8:
rect[ctr].append(rect1)
rect1 = []
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player " + str(ctr+1) + ", place your carrier, 5 adjacent blocks")
count = 0
flag = 9
if count == 5 and flag == 9:
count+=1
flag = 10
if event.type == pygame.MOUSEBUTTONDOWN and flag == 9:
flag2 = 0
[x,y] = pygame.mouse.get_pos()
x = (x/50)
y = (y/50)
if check_if_in_box(ctr,x,y) == 1:
flag2 = 1
display_invalid()
if check_if_ship_present(ctr,x,y,rect,rect1) == 1:
flag2 = 1
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Sorry, there already is a ship on this block")
if count<5 and flag2 == 0:
if count == 1:
flag1 = 0
if check_if_valid(x,y,rect1):
flag1 = 0
                        check = check_orientation(rect1[0],[x,y])
                        print(check)  # debug: 0 means horizontal, else vertical
else:
display_invalid()
flag1 = 1
                if count>1:
                    print(check)
                    if check == 0:
                        print(check_if_horizontal(x,y,rect1,count))
if check_if_horizontal(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
else:
if check_if_vertical(x,y,rect1,count) == 1:
flag1 = 0
else:
flag1 = 1
display_invalid()
if flag1 == 0 or count == 0:
rect1.append([x,y])
pygame.draw.rect(screen,blue,(x*50,y*50,50,50))
pygame.display.update()
count += 1
if count == 6 and flag == 10:
rect[ctr].append(rect1)
rect1 = []
count = 0
flag = 0
ctr += 1
pygame.draw.rect(screen,white,(0,0,650,700))
for i in range(10):
for j in range(10):
pygame.draw.rect(screen,black,((j+2)*50,(i+2)*50,50,50),2)
screen.blit(text1, (250,50))
pygame.display.update()
#Input Loop - end
#Game Loop - start
if ctr == 2:
moves_of_p1 = []
moves_of_p2 = []
index1 = -1
for i in range(5):
rectplayer1[i] = map(lambda x : (x[0]+12,x[1]),rectplayer1[i])
for i in range(5):
rectplayer2[i] = map(lambda x : (x[0]-12,x[1]),rectplayer2[i])
rect = [rectplayer1,rectplayer2]
myfont = pygame.font.SysFont("monospace",30)
text1 = myfont.render("Player 1",1, black)
text2 = myfont.render("Player 2",1, white)
text3 = myfont.render(" = Miss",1,black)
text4 = myfont.render("Hit = ",1,white)
ctr = 0
pygame.draw.rect(screen,white,(0,0,650,700))
pygame.draw.rect(screen,black,(650,0,650,700))
for i in range(10):
for j in range(10):
pygame.draw.rect(screen,black,((j+2)*50,(i+2)*50,50,50),2)
for i in range(14,24):
for j in range(10):
pygame.draw.rect(screen,white,((i)*50,(j+2)*50,50,50),2)
screen.blit(text1, (250,50))
screen.blit(text2, (900,50))
screen.blit(text3, (505,50))
screen.blit(text4, (680,50))
pygame.draw.rect(screen,green,(450,40,50,50))
pygame.draw.rect(screen,red,(800,40,50,50))
pygame.display.update()
    while ctr <= 2:
flag2 = 0
if flag == 0:
flag = 1
import random
ctr = random.randrange(1,3)
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player " + str(ctr) + " starts the attack!!")
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if ctr == 1:
pygame.draw.circle(screen, black, (230,60),10)
pygame.draw.circle(screen, black, (870,60),10)
elif ctr == 2:
pygame.draw.circle(screen, white, (870,60),10)
pygame.draw.circle(screen, white, (230,60),10)
pygame.display.update()
if event.type == pygame.MOUSEBUTTONDOWN:
                if ctr == 1:
                    flag4 = 0
                    # read the click position first so the checks below
                    # validate the current move rather than a stale one
                    [x,y] = pygame.mouse.get_pos()
                    x = x/50
                    y = y/50
                    if check_if_in_box(ctr,x,y):
                        flag2 = 1
                        display_invalid()
                    if check_if_attacked(ctr,x,y,moves_of_p1,moves_of_p2):
                        flag2 = 1
                        root = tk.Tk()
                        root.withdraw()
                        tkMessageBox.showinfo("Message", "This block has already been attacked")
if flag2 == 0:
index1 = -1
for i in rect[1]:
index1 += 1
index = -1
for [xc,yc] in i:
index += 1
if x == xc and y == yc:
flag4 = 1
moves_of_p1.append([x,y])
pygame.draw.rect(screen,red,(x*50,y*50,50,50))
pygame.display.update()
rect[1][index1].pop(index)
rect[1][index1].append([0,0])
break
if flag4 == 1:
break
if flag4 == 1:
if rect[1][index1][0][0] == 0 and rect[1][index1][0][1] == 0:
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Congratulations, you have sunken the Player 2's " + check_ship(index1).name + " !!!")
if check_if_win(rect,ctr) == 1:
ctr = 3
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player 1 wins!!!")
ctr += 1
if flag4 == 0 and flag2 == 0:
moves_of_p1.append([x,y])
pygame.draw.rect(screen, green, (x*50, y*50, 50, 50))
pygame.display.update()
ctr += 1
                elif ctr == 2:
                    flag4 = 0
                    # read the click position first so the checks below
                    # validate the current move rather than a stale one
                    [x,y] = pygame.mouse.get_pos()
                    x = x/50
                    y = y/50
                    if check_if_in_box(ctr,x,y):
                        flag2 = 1
                        display_invalid()
                    if check_if_attacked(ctr,x,y,moves_of_p1,moves_of_p2):
                        flag2 = 1
                        root = tk.Tk()
                        root.withdraw()
                        tkMessageBox.showinfo("Message", "This block has already been attacked")
if flag2 == 0:
index1 = -1
for i in rect[0]:
index1 += 1
index = -1
for [xc,yc] in i:
index += 1
if x == xc and y == yc:
flag4 = 1
moves_of_p2.append([x,y])
pygame.draw.rect(screen,red,(x*50,y*50,50,50))
pygame.display.update()
rect[0][index1].pop(index)
rect[0][index1].append([0,0])
break
if flag4 == 1:
break
if flag4 == 1:
if rect[0][index1][0][0] == 0 and rect[0][index1][0][1] == 0:
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Congratulations, you have sunken Player 1's " + check_ship(index1).name + " !!!")
if check_if_win(rect,ctr):
ctr = 4
root = tk.Tk()
root.withdraw()
tkMessageBox.showinfo("Message", "Player 2 wins!!!")
ctr-=1
if flag4 == 0 and flag2 == 0:
moves_of_p2.append([x,y])
pygame.draw.rect(screen, green, (x*50, y*50, 50, 50))
pygame.display.update()
ctr -= 1
if ctr>2:
pygame.quit()
sys.exit()
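The five ship-placement blocks above are copies of one another, differing only in the ship name, its length, and the flag values that sequence them. A refactor sketch follows; the SHIPS list, place_ship, and read_cell are illustrative names, and the sketch assumes the original helpers (check_if_in_box, check_if_ship_present, check_if_valid, check_orientation, check_if_horizontal, check_if_vertical, display_invalid) and the screen/blue/rect globals are in scope.

# Sketch only: collapses the repeated placement logic into one loop.
SHIPS = [("destroyer", 2), ("submarine", 3), ("cruiser", 3),
         ("battleship", 4), ("carrier", 5)]

def read_cell():
    # Convert the pixel position of the last click into 50px grid coordinates.
    px, py = pygame.mouse.get_pos()
    return px // 50, py // 50

def place_ship(player, name, length):
    # Prompt the player, then collect `length` valid, aligned cells.
    root = tk.Tk()
    root.withdraw()
    tkMessageBox.showinfo("Message", "Player %d, place your %s, %d adjacent blocks"
                          % (player + 1, name, length))
    cells = []
    orientation = None
    while len(cells) < length:
        event = pygame.event.wait()
        if event.type != pygame.MOUSEBUTTONDOWN:
            continue
        x, y = read_cell()
        if check_if_in_box(player, x, y) or check_if_ship_present(player, x, y, rect, cells):
            display_invalid()
            continue
        if len(cells) == 1:
            if not check_if_valid(x, y, cells):
                display_invalid()
                continue
            orientation = check_orientation(cells[0], [x, y])
        elif len(cells) > 1:
            aligned = (check_if_horizontal(x, y, cells, len(cells)) if orientation == 0
                       else check_if_vertical(x, y, cells, len(cells)))
            if not aligned:
                display_invalid()
                continue
        cells.append([x, y])
        pygame.draw.rect(screen, blue, (x * 50, y * 50, 50, 50))
        pygame.display.update()
    return cells

# for ctr in range(2):
#     for name, length in SHIPS:
#         rect[ctr].append(place_ship(ctr, name, length))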
| 26.437126
| 127
| 0.578784
| 2,047
| 13,245
| 3.679043
| 0.083048
| 0.01381
| 0.034657
| 0.055769
| 0.870402
| 0.827247
| 0.792591
| 0.764042
| 0.764042
| 0.720887
| 0
| 0.082521
| 0.264402
| 13,245
| 500
| 128
| 26.49
| 0.690444
| 0.003926
| 0
| 0.802998
| 0
| 0
| 0.068775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.012848
| null | null | 0.027837
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
056d057c4067c4e1600a17bbb7fefecb8e07e970
| 380
|
py
|
Python
|
tools/micropython-mockup/dht.py
|
hwinther/lanot
|
f6700cacb3946535081624467b746fdfd38e021d
|
[
"Apache-2.0"
] | null | null | null |
tools/micropython-mockup/dht.py
|
hwinther/lanot
|
f6700cacb3946535081624467b746fdfd38e021d
|
[
"Apache-2.0"
] | null | null | null |
tools/micropython-mockup/dht.py
|
hwinther/lanot
|
f6700cacb3946535081624467b746fdfd38e021d
|
[
"Apache-2.0"
] | null | null | null |
# Mock DHT11/DHT22 drivers mirroring the MicroPython dht API so code can run
# off-device: measure() is a no-op and both readings are always 0.
class DHT11:
def __init__(self, pin):
self.pin = pin
def measure(self):
pass
def temperature(self):
return 0
def humidity(self):
return 0
class DHT22:
def __init__(self, pin):
self.pin = pin
def measure(self):
pass
def temperature(self):
return 0
def humidity(self):
return 0
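A short host-side smoke test of the mockup above; the import assumes the file is importable as dht, and on real hardware pin would be a machine.Pin rather than a bare number.

from dht import DHT22

sensor = DHT22(pin=4)        # the mockup accepts any pin value
sensor.measure()             # no-op here; triggers a reading on hardware
print(sensor.temperature())  # always 0 in the mockup
print(sensor.humidity())     # always 0 in the mockup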
| 14.074074
| 28
| 0.542105
| 46
| 380
| 4.304348
| 0.282609
| 0.141414
| 0.222222
| 0.141414
| 0.89899
| 0.89899
| 0.89899
| 0.89899
| 0.89899
| 0.89899
| 0
| 0.033613
| 0.373684
| 380
| 26
| 29
| 14.615385
| 0.798319
| 0
| 0
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.111111
| 0
| 0.222222
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 12
|
55a37c77fbc5358686f075c1cdc7277796fc16db
| 28,197
|
py
|
Python
|
hitlist.py
|
danish10499/hitlist-ipv4
|
68a9c859f37f2949366e201d0fc4e9271a2a8c1d
|
[
"MIT"
] | null | null | null |
hitlist.py
|
danish10499/hitlist-ipv4
|
68a9c859f37f2949366e201d0fc4e9271a2a8c1d
|
[
"MIT"
] | null | null | null |
hitlist.py
|
danish10499/hitlist-ipv4
|
68a9c859f37f2949366e201d0fc4e9271a2a8c1d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import argparse
import csv
import os
import subprocess
import random
import pandas as pd
import calendar
import time
import datetime
import dateutil.relativedelta
from sampling import *
ONE_MILLION = 1000000
HUNDRED_THOUSAND = 100000
TEN_THOUSAND = 10000
THOUSAND_FIVE_HUNDRED = 1500
def driver(inputfile, protocol, characteristic, size, time, error, output, force, present_time):
with open(inputfile, 'rb') as t:
header_feat = t.readline().rstrip().decode("utf-8").split(',')
    try:
        host_col_pos = str(header_feat.index('host')+1)
    except ValueError:
        host_col_pos = 0
    try:
        pref_col_pos = str(header_feat.index('prefix_length')+1)
    except ValueError:
        pref_col_pos = 0
    try:
        asn_col_pos = str(header_feat.index('asn')+1)
    except ValueError:
        asn_col_pos = 0
    try:
        ver_col_pos = str(header_feat.index('protocol')+1)
    except ValueError:
        ver_col_pos = 0
if host_col_pos == 0:
print('***ERROR*** Host information is missing (IP column should be named as host)')
return
if time != 0:
past_time = datetime.datetime.fromtimestamp(time)
        rd = dateutil.relativedelta.relativedelta(present_time, past_time)
if rd.months > 2:
print('***WARNING*** A fresh scan is recommended to better capture the Internet behaviour but hitlist is being generated for the given input')
if characteristic == 'cross_response':
random_sampler(inputfile, THOUSAND_FIVE_HUNDRED, output)
return
    if force == 'random':
        # both random paths draw one sample and stop here
        if size == 0:
            random_sampler(inputfile, HUNDRED_THOUSAND, output)
        else:
            random_sampler(inputfile, size, output)
        return
if ver_col_pos == 0 and asn_col_pos == 0 and pref_col_pos == 0 and characteristic != 'cross_response':
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require either Protocol Version or Prefix Length details')
return
if protocol == 'TLS':
if characteristic == 'all_version':
if size == 0:
if error == 1:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Protocol Version detail)')
return
elif error == 2:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Protocol Version details)')
return
elif error >= 5:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
elif pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require either Protocol Version or Prefix Length details)')
return
else:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
elif pref_col_pos != 0 and size <= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require either Protocol Version or Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == 'no_null_version':
if size == 0:
if error == 1:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Protocol Version detail)')
return
elif 2 <= error <= 5:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Protocol Version details)')
return
elif error > 5:
if ver_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require either Protocol Version or Prefix Length details)')
return
else:
if ver_col_pos != 0 and size >= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, ver_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Protocol Version details) or the mentioned size does not perform well')
return
##################
elif characteristic == 'all_prefix-length':
if size == 0:
if error == 1:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', ONE_MILLION, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif 2 <= error <= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', HUNDRED_THOUSAND, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
elif error > 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', TEN_THOUSAND, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= HUNDRED_THOUSAND:
                        subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])  # pref_col_pos matches the guard above
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', size, output)
os.system('rm char_samp')
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == 'routable_prefix-length':
if size == 0:
if 1 <= error <= 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error >= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= TEN_THOUSAND:
                    subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])  # pref_col_pos matches the guard above
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == '24_prefix-length':
if size == 0:
if 1 <= error <= 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error >= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
######################################################################################
elif protocol == 'HTTP':
if characteristic == 'all_prefix-length':
if size == 0:
if 1 <= error <= 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', ONE_MILLION, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error >= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', HUNDRED_THOUSAND, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= HUNDRED_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', size, output)
os.system('rm char_samp')
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == 'routable_prefix-length':
if size == 0:
if error == 1:
if asn_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, asn_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
elif pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require ASN or Prefix Length detail)')
return
elif 2 <= error <= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
elif error > 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == '24_prefix-length':
if size == 0:
if error == 1:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error == 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error > 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
######################################################################################
elif protocol == 'DNS':
if characteristic == 'all_prefix-length':
if size == 0:
if 1 <= error <= 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', ONE_MILLION, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error >= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', HUNDRED_THOUSAND, output)
os.system('rm char_samp')
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= HUNDRED_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
cluster_sampler('char_sort.csv', 'char_cum.csv', 'char_samp', size, output)
os.system('rm char_samp')
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == 'routable_prefix-length':
if size == 0:
if error == 1:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require ASN or Prefix Length detail)')
return
elif 2 <= error <= 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
elif error > 5:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
##################
elif characteristic == '24_prefix-length':
if size == 0:
if error == 1:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', ONE_MILLION, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error == 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', HUNDRED_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length detail)')
return
elif error > 2:
if pref_col_pos != 0:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', TEN_THOUSAND, output)
else:
print('***ERROR*** Relevant information to perform stratified sampling is missing (Require Prefix Length details)')
return
else:
if pref_col_pos != 0 and size >= TEN_THOUSAND:
subprocess.check_call(['./bash_input.sh', inputfile, pref_col_pos])
stratified_random_sampler('char_sort.csv', 'char_cum.csv', size, output)
else:
                    print('***ERROR*** Either relevant information to perform stratified sampling is missing (Require Prefix Length details) or the mentioned size does not perform well')
return
if force != 'random' or characteristic != 'cross_response':
os.system('rm char_sort.csv char_cum.csv')
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--in", dest='inputfile', required=True, help="Mention the source file that needs to be sampled")
parser.add_argument("--p", dest='protocol', required=True, help="Mention the protocol of interest",
type=str, choices=['TLS', 'HTTP', 'DNS'])
parser.add_argument("--f", dest='force', help="Force random sample",
type=str, choices=['random'])
parser.add_argument("--c", dest='characteristic', help="Mention the characteristics that hitlist needs to express", default = 'null',
type=str, choices=['all_version', 'no_null_version', 'all_prefix-length', 'routable_prefix-length', '24_prefix-length', 'cross_response'])
parser.add_argument("--s", dest='size', help="Mention the desired sample size", default = 0,
type=int, choices=[1500, 10000, 100000, 1000000])
parser.add_argument("--e", dest='error', help="Mention the acceptable error", default = 5,
type=int, choices=[1, 2, 5, 10])
parser.add_argument("--out", dest='output', help="Directs the output to a name of your choice",
type=str, default = 'ipv4_hitlist_output.csv')
parser.add_argument("--t", dest='time', help="Time of scan (Epoch time)",
type =int, default = 0)
args = parser.parse_args()
if args.force != 'random' and args.characteristic == 'null':
print('usage: hitlist.py [-h] --in INPUTFILE --p {TLS,HTTP,DNS} [--f {random}] --c')
print(' {all_version,no_null_version,all_prefix-length,routable_prefix-length,24_prefix-length,cross_response}')
print(' [--s {1500,10000,100000,1000000}] [--e {1,2,5,10}]')
print(' [--out OUTPUT] [--t TIME]')
print('hitlist.py: error: the following arguments are required: --c')
return
present_time = datetime.datetime.fromtimestamp(time.time())
driver(args.inputfile, args.protocol, args.characteristic, args.size, args.time, args.error, args.output, args.force, present_time)
if __name__ == '__main__':
main()
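A plausible invocation assembled from the argparse choices above; the file names are illustrative.

import subprocess

# Build a 10,000-address TLS hitlist stratified over protocol versions with a
# 5% acceptable error; the input CSV must contain a 'host' column.
subprocess.check_call([
    "python", "hitlist.py",
    "--in", "scan_results.csv",
    "--p", "TLS",
    "--c", "all_version",
    "--s", "10000",
    "--e", "5",
    "--out", "tls_hitlist.csv",
])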
| 54.120921
| 207
| 0.503777
| 2,752
| 28,197
| 4.951308
| 0.068314
| 0.043153
| 0.050638
| 0.048437
| 0.839278
| 0.829003
| 0.818582
| 0.818509
| 0.818289
| 0.818289
| 0
| 0.011565
| 0.395893
| 28,197
| 521
| 208
| 54.120921
| 0.788364
| 0.000709
| 0
| 0.794118
| 0
| 0.039216
| 0.300481
| 0.00871
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004902
| false
| 0
| 0.026961
| 0
| 0.142157
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55b69b4d9dcdc30f2f0597ee4e7a529262bcbeac
| 13,232
|
py
|
Python
|
codes/torch_similarity/modules/gradient_difference.py
|
GuoShi28/TwoStageAlignment_for_BurstRestoration
|
7abfdbfd6248fbbb1aeae359cf658c5a445c9f40
|
[
"MIT"
] | 21
|
2020-01-16T15:22:26.000Z
|
2022-02-16T17:34:51.000Z
|
codes/torch_similarity/modules/gradient_difference.py
|
GuoShi28/2StageAlign
|
7abfdbfd6248fbbb1aeae359cf658c5a445c9f40
|
[
"MIT"
] | 4
|
2020-01-27T09:03:36.000Z
|
2021-08-09T20:08:42.000Z
|
codes/torch_similarity/modules/gradient_difference.py
|
GuoShi28/2StageAlign
|
7abfdbfd6248fbbb1aeae359cf658c5a445c9f40
|
[
"MIT"
] | 7
|
2020-11-15T09:36:09.000Z
|
2022-03-17T15:24:04.000Z
|
from __future__ import absolute_import
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.parameter import Parameter
import numpy as np
from ..functional import spatial_filter_nd
from .._helper import gauss_kernel_1d
from .._helper import gauss_kernel_2d
from .._helper import gauss_kernel_3d
from .._helper import gradient_kernel_1d
from .._helper import gradient_kernel_2d
from .._helper import gradient_kernel_3d
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return [x, x]
def _grad_param(ndim, method, axis):
if ndim == 1:
kernel = gradient_kernel_1d(method)
elif ndim == 2:
kernel = gradient_kernel_2d(method, axis)
elif ndim == 3:
kernel = gradient_kernel_3d(method, axis)
else:
raise NotImplementedError
kernel = kernel.reshape(1, 1, *kernel.shape)
return Parameter(torch.Tensor(kernel).float())
def _gauss_param(ndim, sigma, truncate):
if ndim == 1:
kernel = gauss_kernel_1d(sigma, truncate)
elif ndim == 2:
kernel = gauss_kernel_2d(sigma, truncate)
elif ndim == 3:
kernel = gauss_kernel_3d(sigma, truncate)
else:
raise NotImplementedError
kernel = kernel.reshape(1, 1, *kernel.shape)
return Parameter(torch.Tensor(kernel).float())
class GradientDifference1d(nn.Module):
""" One-dimensional gradient difference
Args:
grad_method (str, optional): Type of the gradient kernel. Defaults to 'default'.
gauss_sigma (float, optional): Standard deviation for Gaussian kernel. Defaults to None.
gauss_truncate (float, optional): Truncate the Gaussian kernel at this value. Defaults to 4.0.
        return_map (bool, optional): If True, also return the difference map. Defaults to False.
reduction (str, optional): Specifies the reduction to apply to the output:
``'mean'`` | ``'sum'``. Defaults to ``'mean'``.
"""
def __init__(self,
grad_method='default',
gauss_sigma=None,
gauss_truncate=4.0,
return_map=False,
reduction='mean'):
super(GradientDifference1d, self).__init__()
self.grad_method = grad_method
self.gauss_sigma = _pair(gauss_sigma)
self.gauss_truncate = gauss_truncate
self.grad_kernel = None
self.gauss_kernel_x = None
self.gauss_kernel_y = None
self.return_map = return_map
self.reduction = reduction
self._initialize_params()
self._freeze_params()
def _initialize_params(self):
self._initialize_grad_kernel()
self._initialize_gauss_kernel()
def _initialize_grad_kernel(self):
self.grad_kernel = _grad_param(1, self.grad_method, axis=0)
def _initialize_gauss_kernel(self):
if self.gauss_sigma[0] is not None:
self.gauss_kernel_x = _gauss_param(1, self.gauss_sigma[0], self.gauss_truncate)
if self.gauss_sigma[1] is not None:
self.gauss_kernel_y = _gauss_param(1, self.gauss_sigma[1], self.gauss_truncate)
def _check_type_forward(self, x):
if x.dim() != 3:
raise ValueError('expected 3D input (got {}D input)'.format(x.dim()))
def _freeze_params(self):
self.grad_kernel.requires_grad = False
if self.gauss_kernel_x is not None:
self.gauss_kernel_x.requires_grad = False
if self.gauss_kernel_y is not None:
self.gauss_kernel_y.requires_grad = False
def forward(self, x, y):
self._check_type_forward(x)
self._check_type_forward(y)
self._freeze_params()
if x.shape[1] != y.shape[1]:
x = torch.mean(x, dim=1, keepdim=True)
y = torch.mean(y, dim=1, keepdim=True)
# reshape
b, c = x.shape[:2]
spatial_shape = x.shape[2:]
x = x.view(b*c, 1, *spatial_shape)
y = y.view(b*c, 1, *spatial_shape)
# smoothing
if self.gauss_kernel_x is not None:
x = spatial_filter_nd(x, self.gauss_kernel_x)
if self.gauss_kernel_y is not None:
y = spatial_filter_nd(y, self.gauss_kernel_y)
# gradient magnitude
x_grad = torch.abs(spatial_filter_nd(x, self.grad_kernel))
y_grad = torch.abs(spatial_filter_nd(y, self.grad_kernel))
# absolute difference
diff = torch.abs(x_grad - y_grad)
# reshape back
diff_map = diff.view(b, c, *spatial_shape)
if self.reduction == 'mean':
diff = torch.mean(diff_map)
elif self.reduction == 'sum':
diff = torch.sum(diff_map)
else:
raise KeyError('unsupported reduction type: %s' % self.reduction)
if self.return_map:
return diff, diff_map
return diff
class GradientDifference2d(nn.Module):
""" Two-dimensional gradient difference
Args:
grad_method (str, optional): Type of the gradient kernel. Defaults to 'default'.
gauss_sigma (float, optional): Standard deviation for Gaussian kernel. Defaults to None.
gauss_truncate (float, optional): Truncate the Gaussian kernel at this value. Defaults to 4.0.
        return_map (bool, optional): If True, also return the difference map. Defaults to False.
reduction (str, optional): Specifies the reduction to apply to the output:
``'mean'`` | ``'sum'``. Defaults to ``'mean'``.
"""
def __init__(self,
grad_method='default',
gauss_sigma=None,
gauss_truncate=4.0,
return_map=False,
reduction='mean'):
super(GradientDifference2d, self).__init__()
self.grad_method = grad_method
self.gauss_sigma = _pair(gauss_sigma)
self.gauss_truncate = gauss_truncate
self.grad_u_kernel = None
self.grad_v_kernel = None
self.gauss_kernel_x = None
self.gauss_kernel_y = None
self.return_map = return_map
self.reduction = reduction
self._initialize_params()
self._freeze_params()
def _initialize_params(self):
self._initialize_grad_kernel()
self._initialize_gauss_kernel()
def _initialize_grad_kernel(self):
self.grad_u_kernel = _grad_param(2, self.grad_method, axis=0)
self.grad_v_kernel = _grad_param(2, self.grad_method, axis=1)
def _initialize_gauss_kernel(self):
if self.gauss_sigma[0] is not None:
self.gauss_kernel_x = _gauss_param(2, self.gauss_sigma[0], self.gauss_truncate)
if self.gauss_sigma[1] is not None:
self.gauss_kernel_y = _gauss_param(2, self.gauss_sigma[1], self.gauss_truncate)
def _check_type_forward(self, x):
if x.dim() != 4:
raise ValueError('expected 4D input (got {}D input)'.format(x.dim()))
def _freeze_params(self):
self.grad_u_kernel.requires_grad = False
self.grad_v_kernel.requires_grad = False
if self.gauss_kernel_x is not None:
self.gauss_kernel_x.requires_grad = False
if self.gauss_kernel_y is not None:
self.gauss_kernel_y.requires_grad = False
def forward(self, x, y):
self._check_type_forward(x)
self._check_type_forward(y)
self._freeze_params()
if x.shape[1] != y.shape[1]:
x = torch.mean(x, dim=1, keepdim=True)
y = torch.mean(y, dim=1, keepdim=True)
# reshape
b, c = x.shape[:2]
spatial_shape = x.shape[2:]
x = x.view(b*c, 1, *spatial_shape)
y = y.view(b*c, 1, *spatial_shape)
# smoothing
if self.gauss_kernel_x is not None:
x = spatial_filter_nd(x, self.gauss_kernel_x)
if self.gauss_kernel_y is not None:
y = spatial_filter_nd(y, self.gauss_kernel_y)
# gradient magnitude
x_grad_u = torch.abs(spatial_filter_nd(x, self.grad_u_kernel))
x_grad_v = torch.abs(spatial_filter_nd(x, self.grad_v_kernel))
y_grad_u = torch.abs(spatial_filter_nd(y, self.grad_u_kernel))
y_grad_v = torch.abs(spatial_filter_nd(y, self.grad_v_kernel))
# absolute difference
diff_u = torch.abs(x_grad_u - y_grad_u)
diff_v = torch.abs(x_grad_v - y_grad_v)
# reshape back
diff_u = diff_u.view(b, c, *spatial_shape)
diff_v = diff_v.view(b, c, *spatial_shape)
diff_map = 0.5 * (diff_u + diff_v)
if self.reduction == 'mean':
diff = torch.mean(diff_map)
elif self.reduction == 'sum':
diff = torch.sum(diff_map)
else:
raise KeyError('unsupported reduction type: %s' % self.reduction)
if self.return_map:
return diff, diff_map
return diff
class GradientDifference3d(nn.Module):
""" Three-dimensional gradient difference
Args:
grad_method (str, optional): Type of the gradient kernel. Defaults to 'default'.
gauss_sigma (float, optional): Standard deviation for Gaussian kernel. Defaults to None.
gauss_truncate (float, optional): Truncate the Gaussian kernel at this value. Defaults to 4.0.
        return_map (bool, optional): If True, also return the difference map. Defaults to False.
reduction (str, optional): Specifies the reduction to apply to the output:
``'mean'`` | ``'sum'``. Defaults to ``'mean'``.
"""
def __init__(self,
grad_method='default',
gauss_sigma=None,
gauss_truncate=4.0,
return_map=False,
reduction='mean'):
super(GradientDifference3d, self).__init__()
self.grad_method = grad_method
self.gauss_sigma = _pair(gauss_sigma)
self.gauss_truncate = gauss_truncate
self.grad_u_kernel = None
self.grad_v_kernel = None
self.grad_w_kernel = None
self.gauss_kernel_x = None
self.gauss_kernel_y = None
self.return_map = return_map
self.reduction = reduction
self._initialize_params()
self._freeze_params()
def _initialize_params(self):
self._initialize_grad_kernel()
self._initialize_gauss_kernel()
def _initialize_grad_kernel(self):
self.grad_u_kernel = _grad_param(3, self.grad_method, axis=0)
self.grad_v_kernel = _grad_param(3, self.grad_method, axis=1)
self.grad_w_kernel = _grad_param(3, self.grad_method, axis=2)
def _initialize_gauss_kernel(self):
if self.gauss_sigma[0] is not None:
self.gauss_kernel_x = _gauss_param(3, self.gauss_sigma[0], self.gauss_truncate)
if self.gauss_sigma[1] is not None:
self.gauss_kernel_y = _gauss_param(3, self.gauss_sigma[1], self.gauss_truncate)
def _check_type_forward(self, x):
if x.dim() != 5:
raise ValueError('expected 5D input (got {}D input)'.format(x.dim()))
def _freeze_params(self):
self.grad_u_kernel.requires_grad = False
self.grad_v_kernel.requires_grad = False
self.grad_w_kernel.requires_grad = False
if self.gauss_kernel_x is not None:
self.gauss_kernel_x.requires_grad = False
if self.gauss_kernel_y is not None:
self.gauss_kernel_y.requires_grad = False
def forward(self, x, y):
self._check_type_forward(x)
self._check_type_forward(y)
self._freeze_params()
if x.shape[1] != y.shape[1]:
x = torch.mean(x, dim=1, keepdim=True)
y = torch.mean(y, dim=1, keepdim=True)
# reshape
b, c = x.shape[:2]
spatial_shape = x.shape[2:]
x = x.view(b*c, 1, *spatial_shape)
y = y.view(b*c, 1, *spatial_shape)
# smoothing
if self.gauss_kernel_x is not None:
x = spatial_filter_nd(x, self.gauss_kernel_x)
if self.gauss_kernel_y is not None:
y = spatial_filter_nd(y, self.gauss_kernel_y)
# gradient magnitude
x_grad_u = torch.abs(spatial_filter_nd(x, self.grad_u_kernel))
x_grad_v = torch.abs(spatial_filter_nd(x, self.grad_v_kernel))
x_grad_w = torch.abs(spatial_filter_nd(x, self.grad_w_kernel))
y_grad_u = torch.abs(spatial_filter_nd(y, self.grad_u_kernel))
y_grad_v = torch.abs(spatial_filter_nd(y, self.grad_v_kernel))
y_grad_w = torch.abs(spatial_filter_nd(y, self.grad_w_kernel))
# absolute difference
diff_u = torch.abs(x_grad_u - y_grad_u)
diff_v = torch.abs(x_grad_v - y_grad_v)
diff_w = torch.abs(x_grad_w - y_grad_w)
# reshape back
diff_u = diff_u.view(b, c, *spatial_shape)
diff_v = diff_v.view(b, c, *spatial_shape)
diff_w = diff_w.view(b, c, *spatial_shape)
diff_map = (diff_u + diff_v + diff_w) / 3.0
if self.reduction == 'mean':
diff = torch.mean(diff_map)
elif self.reduction == 'sum':
diff = torch.sum(diff_map)
else:
raise KeyError('unsupported reduction type: %s' % self.reduction)
if self.return_map:
return diff, diff_map
return diff
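A minimal smoke test of the 2-D module above; the shapes are illustrative, and inputs must be 4-D (B, C, H, W) tensors as _check_type_forward enforces.

import torch

gd = GradientDifference2d(gauss_sigma=1.0, return_map=True)
x = torch.rand(2, 3, 64, 64)
y = torch.rand(2, 3, 64, 64)
loss, diff_map = gd(x, y)           # scalar loss plus the per-pixel map
print(loss.item(), diff_map.shape)  # diff_map keeps the input shape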
| 33.928205
| 102
| 0.631726
| 1,821
| 13,232
| 4.298737
| 0.068644
| 0.068983
| 0.068983
| 0.043689
| 0.883112
| 0.855263
| 0.852836
| 0.847982
| 0.822177
| 0.822177
| 0
| 0.010765
| 0.269876
| 13,232
| 389
| 103
| 34.015424
| 0.799503
| 0.140493
| 0
| 0.754864
| 0
| 0
| 0.022596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093385
| false
| 0
| 0.050584
| 0
| 0.194553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9ce20e78977ee63eaf76d1fe92b3e4600023913
| 750
|
py
|
Python
|
elasticdl/python/collective_ops/communicator.py
|
chunyang-wen/elasticdl
|
7b16b44f5314507494a552c11caaf3b2bce6d209
|
[
"MIT"
] | 1
|
2020-01-10T02:32:51.000Z
|
2020-01-10T02:32:51.000Z
|
elasticdl/python/collective_ops/communicator.py
|
chunyang-wen/elasticdl
|
7b16b44f5314507494a552c11caaf3b2bce6d209
|
[
"MIT"
] | null | null | null |
elasticdl/python/collective_ops/communicator.py
|
chunyang-wen/elasticdl
|
7b16b44f5314507494a552c11caaf3b2bce6d209
|
[
"MIT"
] | null | null | null |
# TODO: This is a dummy for now until the real implementation
# has been open sourced
from elasticdl.python.common.constants import CollectiveCommunicatorStatus
class CollectiveCommunicator(object):
def __init__(self):
pass
def allreduce(self, data, op="MEAN"):
if data is None:
return CollectiveCommunicatorStatus.FAILED, data
return CollectiveCommunicatorStatus.SUCCEEDED, data
def broadcast(self, data, root_ip):
if data is None:
return CollectiveCommunicatorStatus.FAILED, data
return CollectiveCommunicatorStatus.SUCCEEDED, data
def barrier(self):
return CollectiveCommunicatorStatus.SUCCEEDED
def has_new_worker_joining(self):
return True
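Callers can already be written against the status protocol of the stub above; a sketch with illustrative data values.

comm = CollectiveCommunicator()
status, averaged = comm.allreduce([1.0, 2.0, 3.0])            # op defaults to "MEAN"
status, weights = comm.broadcast({"w": 0.5}, root_ip="10.0.0.1")
if status == CollectiveCommunicatorStatus.SUCCEEDED:
    comm.barrier()  # always succeeds in the dummy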
| 28.846154
| 74
| 0.72
| 78
| 750
| 6.820513
| 0.576923
| 0.319549
| 0.242481
| 0.045113
| 0.398496
| 0.398496
| 0.398496
| 0.398496
| 0.398496
| 0.398496
| 0
| 0
| 0.224
| 750
| 25
| 75
| 30
| 0.914089
| 0.105333
| 0
| 0.375
| 0
| 0
| 0.005988
| 0
| 0
| 0
| 0
| 0.04
| 0
| 1
| 0.3125
| false
| 0.0625
| 0.0625
| 0.125
| 0.8125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
758ad61a782dc1c68d435335538a887df14b3a4e
| 1,639
|
py
|
Python
|
tests/test_decorator.py
|
flagship-io/flagship-python-sdk
|
aa2e178f8631dc26d05e1c90cad1ecbbec4887dd
|
[
"Apache-2.0"
] | 6
|
2021-07-16T15:37:10.000Z
|
2021-08-04T14:52:02.000Z
|
tests/test_decorator.py
|
flagship-io/flagship-python-sdk
|
aa2e178f8631dc26d05e1c90cad1ecbbec4887dd
|
[
"Apache-2.0"
] | 5
|
2021-11-04T15:03:37.000Z
|
2022-02-15T11:43:48.000Z
|
tests/test_decorator.py
|
flagship-io/flagship-python-sdk
|
aa2e178f8631dc26d05e1c90cad1ecbbec4887dd
|
[
"Apache-2.0"
] | 1
|
2021-03-10T19:46:01.000Z
|
2021-03-10T19:46:01.000Z
|
from flagship.decorators import types_validator
def test_decorator_type_value():
@types_validator(True, {'types': int, 'max_length': 2, 'min_value': 0, 'max_value': 2})
def test(value):
pass
try:
test(1)
assert True
except Exception as e:
assert False
try:
test(4)
assert False
except Exception as e:
assert True
try:
test(-3)
assert False
except Exception as e:
assert True
try:
test('zlekfnelrkjnlfn')
assert False
except Exception as e:
assert True
def test_decorator_type_dict_value():
@types_validator(True, {'types': int, 'max_length': 2, 'min_value': 0, 'max_value': 2}, {'types': int, 'max_length': 2, 'min_value': 0, 'max_value': 2})
def test(value1, value2):
pass
try:
test(1, 1)
assert True
except Exception as e:
assert False
try:
test(4, 4)
assert False
except Exception as e:
assert True
try:
test(-3, -2)
assert False
except Exception as e:
assert True
try:
test('zlekfnelrkjnlfn', 'a')
assert False
except Exception as e:
assert True
def test_decorator_type_dict_value2():
@types_validator(True, {'types': [int, str], 'max_length': 2, 'min_value': 0, 'max_value': 2})
def test(value1):
pass
try:
test(1)
test("aa")
assert True
except Exception as e:
assert False
try:
test(1.33)
assert False
except Exception as e:
assert True
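The tests pin down the contract of types_validator: reject a value whose type is not listed, and reject numbers outside [min_value, max_value]. A minimal sketch with that behavior; illustrative only, not the SDK's implementation.

import functools

def types_validator_sketch(strict, *specs):
    # One spec dict per positional argument, mirroring the dicts above;
    # `strict` is kept only for signature parity and is always enforced here.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for value, spec in zip(args, specs):
                allowed = spec['types'] if isinstance(spec['types'], list) else [spec['types']]
                if type(value) not in allowed:
                    raise TypeError('invalid type for {!r}'.format(value))
                if isinstance(value, str) and len(value) > spec.get('max_length', len(value)):
                    raise ValueError('value longer than max_length')
                if isinstance(value, (int, float)):
                    if value < spec.get('min_value', value) or value > spec.get('max_value', value):
                        raise ValueError('value outside [min_value, max_value]')
            return func(*args, **kwargs)
        return wrapper
    return decorator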
| 19.282353
| 156
| 0.561318
| 207
| 1,639
| 4.31401
| 0.178744
| 0.078387
| 0.19037
| 0.201568
| 0.856663
| 0.827548
| 0.827548
| 0.827548
| 0.783875
| 0.783875
| 0
| 0.026802
| 0.339841
| 1,639
| 84
| 157
| 19.511905
| 0.798521
| 0
| 0
| 0.703125
| 0
| 0
| 0.100794
| 0
| 0
| 0
| 0
| 0
| 0.3125
| 1
| 0.09375
| false
| 0.046875
| 0.015625
| 0
| 0.109375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
75c1122857d124387b9a70e6b950baa0cdd8df79
| 685
|
py
|
Python
|
labs/attacklab/attack.py
|
Toolman-P/CSAPP-Labs
|
98749f20fedacde544e1d67e46c510d01c348ee3
|
[
"MIT"
] | null | null | null |
labs/attacklab/attack.py
|
Toolman-P/CSAPP-Labs
|
98749f20fedacde544e1d67e46c510d01c348ee3
|
[
"MIT"
] | null | null | null |
labs/attacklab/attack.py
|
Toolman-P/CSAPP-Labs
|
98749f20fedacde544e1d67e46c510d01c348ee3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# This script is to generate plain text for attacks and references
import os
for i in range(5):
print('00 00 00 00 00 00 00 00')
print('06 1a 40 00 00 00 00 00') #%rsp->%rax
print('c5 19 40 00 00 00 00 00') #%rax->%rdi
print('cc 19 40 00 00 00 00 00') #popq %rax
print('48 00 00 00 00 00 00 00')
print('dd 19 40 00 00 00 00 00') #%eax(rax)->%edx(rdx)
print('34 1a 40 00 00 00 00 00') #%edx(rdx)->%ecx(rcx)
print('13 1a 40 00 00 00 00 00') #%ecx(rcx)->%esi(rsi)
print('d6 19 40 00 00 00 00 00') #add rdi,rsi -> rax
print('c5 19 40 00 00 00 00 00') #%rax->%rdi
print('fa 18 40 00 00 00 00 00')
print('35 39 62 39 39 37 66 61')
# eax only supports low-order (32-bit) operations, so the value of rsp can only be passed along through rdi
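The printed lines use the format consumed by the lab's hex2raw utility; an equivalent conversion in Python, as a sketch.

def hex2raw(lines):
    # Join the whitespace-separated hex byte pairs into the raw attack bytes.
    return bytes.fromhex(' '.join(lines))

# e.g. hex2raw(['00 00 00 00 00 00 00 00', '06 1a 40 00 00 00 00 00'])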
| 36.052632
| 66
| 0.637956
| 158
| 685
| 2.765823
| 0.35443
| 0.448513
| 0.521739
| 0.494279
| 0.482838
| 0.482838
| 0.443936
| 0.24714
| 0.160183
| 0.160183
| 0
| 0.335175
| 0.207299
| 685
| 19
| 67
| 36.052632
| 0.469613
| 0.338686
| 0
| 0.142857
| 1
| 0
| 0.623025
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0.857143
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
75d79a0c5c5c510d0880a3a6094b4192da8bed3f
| 24
|
py
|
Python
|
test/test4.py
|
JosephAnderson234/Py-DownloadTube
|
7017d93fb9386a5749f217d2eee5440b8280b82f
|
[
"MIT"
] | null | null | null |
test/test4.py
|
JosephAnderson234/Py-DownloadTube
|
7017d93fb9386a5749f217d2eee5440b8280b82f
|
[
"MIT"
] | null | null | null |
test/test4.py
|
JosephAnderson234/Py-DownloadTube
|
7017d93fb9386a5749f217d2eee5440b8280b82f
|
[
"MIT"
] | null | null | null |
print(round(1.32251, 1))
| 24
| 24
| 0.708333
| 5
| 24
| 3.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 0.041667
| 24
| 1
| 24
| 24
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
75d8694905c3a6e87b6b6c48ba1fcf3d44559e8a
| 338
|
py
|
Python
|
main/CompuCellPythonTutorial/scientificPlots/Simulation/scientificPlots.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | null | null | null |
main/CompuCellPythonTutorial/scientificPlots/Simulation/scientificPlots.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | null | null | null |
main/CompuCellPythonTutorial/scientificPlots/Simulation/scientificPlots.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | 1
|
2021-02-26T21:50:29.000Z
|
2021-02-26T21:50:29.000Z
|
from cc3d import CompuCellSetup
from .scientificPlotsSteppables import ExtraPlotSteppable
from .scientificPlotsSteppables import ExtraMultiPlotSteppable
CompuCellSetup.register_steppable(steppable=ExtraPlotSteppable(frequency=10))
CompuCellSetup.register_steppable(steppable=ExtraMultiPlotSteppable(frequency=10))
CompuCellSetup.run()
| 33.8
| 82
| 0.890533
| 28
| 338
| 10.678571
| 0.428571
| 0.19398
| 0.234114
| 0.267559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0.053254
| 338
| 9
| 83
| 37.555556
| 0.91875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
75dd2d659d20b0d56817f15391bcc4f78e610ccd
| 3,973
|
py
|
Python
|
haskpy/tests/test_utils.py
|
jluttine/haskpy
|
79fca70b5f46d8551ed61b4bbd040de5f5ba0440
|
[
"MIT"
] | 2
|
2021-04-08T18:34:39.000Z
|
2022-02-24T18:02:45.000Z
|
haskpy/tests/test_utils.py
|
jluttine/haskpy
|
79fca70b5f46d8551ed61b4bbd040de5f5ba0440
|
[
"MIT"
] | null | null | null |
haskpy/tests/test_utils.py
|
jluttine/haskpy
|
79fca70b5f46d8551ed61b4bbd040de5f5ba0440
|
[
"MIT"
] | null | null | null |
import pytest
from haskpy import utils
from haskpy.internal import (
class_property,
class_function,
abstract_property,
abstract_function,
abstract_class_property,
abstract_class_function,
)
def test_class_property():
class A():
@class_property
def foo(cls):
"""Docstring of foo"""
return 42
assert A.foo == 42
assert A.__dict__["foo"].__doc__ == "Docstring of foo"
assert "foo" in dir(A)
with pytest.raises(
AttributeError,
match="'A' object has no attribute 'foo'",
):
A().foo
with pytest.raises(KeyError, match="'foo'"):
A().__dict__["foo"]
# Unfortunately, by default __dir__ just adds all class attributes to
# instances too. So, let's disable the test.
# assert "foo" not in dir(A())
return
def test_class_function():
class A():
@class_function
def foo(cls, x, y):
"""Docstring of foo"""
return x + y
assert A.foo(10, 32) == 42
assert A.foo.__doc__ == "Docstring of foo"
assert "foo" in dir(A)
with pytest.raises(
AttributeError,
match="'A' object has no attribute 'foo'",
):
A().foo
with pytest.raises(KeyError, match="'foo'"):
A().__dict__["foo"]
# Unfortunately, by default __dir__ just adds all class attributes to
# instances too. So, let's disable the test.
# assert "foo" not in dir(A())
return
def test_abstract_function():
class A():
@abstract_function
def foo(self, x, y):
"""Docstring of foo"""
f = A.foo
assert f.__doc__ == "Docstring of foo"
assert "foo" in dir(A)
with pytest.raises(
NotImplementedError,
match="'foo' function is abstract",
):
f()
g = A().foo
assert g.__doc__ == "Docstring of foo"
assert "foo" in dir(A())
with pytest.raises(
NotImplementedError,
match="'foo' function is abstract",
):
g()
return
def test_abstract_property():
class A():
@abstract_property
def foo(self):
"""Docstring of foo"""
with pytest.raises(
NotImplementedError,
match="'foo' attribute of type object 'A' is abstract",
):
A.foo
with pytest.raises(
NotImplementedError,
match="'foo' attribute of object 'A' is abstract",
):
A().foo
assert A.__dict__["foo"].__doc__ == "Docstring of foo"
assert "foo" in dir(A)
assert "foo" in dir(A())
return
def test_abstract_class_function():
class A():
@abstract_class_function
def foo(cls, x, y):
"""Docstring of foo"""
f = A.foo
assert f.__doc__ == "Docstring of foo"
assert "foo" in dir(A)
with pytest.raises(
NotImplementedError,
match="'foo' function is abstract",
):
f()
with pytest.raises(
AttributeError,
match="'A' object has no attribute 'foo'",
):
A().foo
# Unfortunately, by default __dir__ just adds all class attributes to
# instances too. So, let's disable the test.
# assert "foo" not in dir(A())
return
def test_abstract_class_property():
class A():
@abstract_class_property
def foo(cls):
"""Docstring of foo"""
assert A.__dict__["foo"].__doc__ == "Docstring of foo"
assert "foo" in dir(A)
with pytest.raises(
NotImplementedError,
match="'foo' attribute of type object 'A' is abstract",
):
A.foo
with pytest.raises(
AttributeError,
match="'A' object has no attribute 'foo'",
):
A().foo
# Unfortunately, by default __dir__ just adds all class attributes to
# instances too. So, let's disable the test.
# assert "foo" not in dir(A())
return
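The class-property behavior exercised above (a computed value on class access, AttributeError on instance access, and a docstring preserved on the class attribute) can be reproduced with a small descriptor. A sketch, not haskpy's actual implementation:

class class_property_sketch(object):

    def __init__(self, f):
        self.f = f
        self.__doc__ = f.__doc__  # keeps A.__dict__['foo'].__doc__ intact

    def __get__(self, obj, cls):
        if obj is not None:
            # Instances must not see the attribute, matching the tests above.
            raise AttributeError("'{0}' object has no attribute '{1}'".format(
                cls.__name__, self.f.__name__))
        return self.f(cls)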
| 20.910526
| 73
| 0.565064
| 477
| 3,973
| 4.509434
| 0.12369
| 0.066481
| 0.084612
| 0.074384
| 0.803812
| 0.799628
| 0.79126
| 0.79126
| 0.74198
| 0.72292
| 0
| 0.003706
| 0.320916
| 3,973
| 189
| 74
| 21.021164
| 0.793551
| 0.166625
| 0
| 0.715517
| 0
| 0
| 0.154176
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 1
| 0.103448
| false
| 0
| 0.025862
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f93ba0d557a8431d4c49057d06f03f942e31489e
| 8,448
|
py
|
Python
|
tests/models/test_conflicts.py
|
yaal-fr/sheraf
|
9821a53d8b0ea0aba420175e4cfa81529262f88c
|
[
"MIT"
] | 1
|
2020-03-18T09:54:52.000Z
|
2020-03-18T09:54:52.000Z
|
tests/models/test_conflicts.py
|
yaal-fr/sheraf
|
9821a53d8b0ea0aba420175e4cfa81529262f88c
|
[
"MIT"
] | null | null | null |
tests/models/test_conflicts.py
|
yaal-fr/sheraf
|
9821a53d8b0ea0aba420175e4cfa81529262f88c
|
[
"MIT"
] | null | null | null |
import multiprocessing
import pytest
import ZODB
import sheraf
import tests
@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_database"),
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_empty_model_no_conflict(database):
    database.nestable = True

    class Model(tests.UUIDAutoModel):
        pass

    with sheraf.connection(commit=True):
        m = Model.create()

    with sheraf.connection(commit=True):
        m1 = Model.read(m.id)

        with sheraf.connection(commit=True):
            m2 = Model.read(m.id)
            m2.save()

        m1.save()


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_database"),
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_same_simple_attribute_same_modification_no_conflict(database):
    database.nestable = True

    class Model(tests.UUIDAutoModel):
        something = sheraf.attributes.simples.SimpleAttribute()
        stuff = sheraf.LargeListAttribute(lazy=False)

    with sheraf.connection(commit=True):
        m = Model.create()

    with sheraf.connection(commit=True):
        m1 = Model.read(m.id)

        with sheraf.connection(commit=True):
            m2 = Model.read(m.id)
            m2.something = "YOLO"

        m1.something = "YOLO"


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_database"),
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_different_simple_attribute_modification_no_conflict(database):
    database.nestable = True

    class Model(tests.UUIDAutoModel):
        something = sheraf.attributes.simples.SimpleAttribute()
        something_else = sheraf.attributes.simples.SimpleAttribute()
        stuff = sheraf.LargeListAttribute(lazy=False)

    with sheraf.connection(commit=True):
        m = Model.create()

    with sheraf.connection(commit=True):
        m1 = Model.read(m.id)

        with sheraf.connection(commit=True):
            m2 = Model.read(m.id)
            m2.something = "YOLO"

        m1.something_else = "YEAH"


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_database"),
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_same_simple_attribute_different_modification_conflict(database):
    database.nestable = True

    class Model(tests.UUIDAutoModel):
        something = sheraf.attributes.simples.SimpleAttribute()
        stuff = sheraf.LargeListAttribute(lazy=False)

    with sheraf.connection(commit=True):
        m = Model.create()
        mid = m.id

    with pytest.raises(ZODB.POSException.ConflictError):
        with sheraf.connection(commit=True):
            m1 = Model.read(m.id)

            with sheraf.connection(commit=True):
                m2 = Model.read(m.id)
                m2.something = "connection 2"

            m1.something = "connection 1"

    with sheraf.connection():
        m = Model.read(mid)
        assert "connection 2" == m.something


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_empty_model_no_conflict_mp(database):
    class Model(tests.UUIDAutoModel):
        pass

    def process(uri, model_id, barrier):
        sheraf.Database(uri)
        with sheraf.connection(commit=True):
            m = Model.read(model_id)
            barrier.wait()
            m.save()

    with sheraf.connection(commit=True):
        m = Model.create()

    barrier = multiprocessing.Barrier(2)
    process1 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier)
    )
    process2 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier)
    )
    process1.start()
    process2.start()
    process2.join(timeout=10)
    process1.join(timeout=10)
    assert 0 == process1.exitcode
    assert 0 == process2.exitcode


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_same_simple_attribute_same_modification_conflict_mp(database):
    class Model(tests.UUIDAutoModel):
        order = sheraf.SimpleAttribute()
        stuff = sheraf.LargeListAttribute(lazy=False)

    def process(uri, model_id, barrier):
        sheraf.Database(uri)
        with sheraf.connection(commit=True):
            m = Model.read(model_id)
            barrier.wait()
            m.order = "YOLO"

    with sheraf.connection(commit=True):
        m = Model.create()

    barrier = multiprocessing.Barrier(2)
    process1 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier)
    )
    process2 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier)
    )
    process1.start()
    process2.start()
    process2.join(timeout=10)
    process1.join(timeout=10)
    assert 0 == process1.exitcode
    assert 0 == process2.exitcode


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_different_simple_attribute_modification_no_conflict_mp(database):
    class Model(tests.UUIDAutoModel):
        something = sheraf.SimpleAttribute()
        something_else = sheraf.SimpleAttribute()
        stuff = sheraf.LargeListAttribute(lazy=False)

    def process(uri, model_id, barrier, queue, lock):
        sheraf.Database(uri)
        with sheraf.connection(commit=True):
            m = Model.read(model_id)
            barrier.wait()
            with lock:
                order = queue.get()
                if order == "first":
                    m.something = "YEAH"
                elif order == "second":
                    m.something_else = "YOH"

    with sheraf.connection(commit=True):
        m = Model.create()

    barrier = multiprocessing.Barrier(2)
    lock = multiprocessing.Lock()
    queue = multiprocessing.Queue()
    queue.put("first")
    queue.put("second")
    process1 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier, queue, lock)
    )
    process2 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier, queue, lock)
    )
    process1.start()
    process2.start()
    process2.join(timeout=10)
    process1.join(timeout=10)
    assert 0 == process1.exitcode
    assert 0 == process2.exitcode


@pytest.mark.parametrize(
    "database",
    [
        pytest.lazy_fixture("sheraf_zeo_database"),
        # pytest.lazy_fixture("sheraf_pgsql_relstorage_database"),
    ],
)
def test_same_simple_attribute_different_modification_conflict_mp(database):
    class Model(tests.UUIDAutoModel):
        order = sheraf.SimpleAttribute()
        stuff = sheraf.LargeListAttribute(lazy=False)

    def process(uri, model_id, barrier, queue, lock):
        sheraf.Database(uri)
        with sheraf.connection() as conn:
            m = Model.read(model_id)
            barrier.wait()
            with lock:
                order = queue.get()
                m.order = order
                if order == "first":
                    conn.transaction_manager.commit()
                elif order == "second":
                    with pytest.raises(ZODB.POSException.ConflictError):
                        conn.transaction_manager.commit()

    with sheraf.connection(commit=True):
        m = Model.create()

    barrier = multiprocessing.Barrier(2)
    lock = multiprocessing.Lock()
    queue = multiprocessing.Queue()
    queue.put("first")
    queue.put("second")
    process1 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier, queue, lock)
    )
    process2 = multiprocessing.Process(
        target=process, args=(database.uri, m.id, barrier, queue, lock)
    )
    process1.start()
    process2.start()
    process2.join(timeout=10)
    process1.join(timeout=10)
    assert 0 == process1.exitcode
    assert 0 == process2.exitcode
| 26.734177
| 76
| 0.637074
| 894
| 8,448
| 5.878076
| 0.096197
| 0.039962
| 0.079924
| 0.095147
| 0.924072
| 0.92255
| 0.902569
| 0.902569
| 0.888107
| 0.888107
| 0
| 0.012494
| 0.251539
| 8,448
| 315
| 77
| 26.819048
| 0.818599
| 0.056345
| 0
| 0.760684
| 0
| 0
| 0.048116
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.051282
| false
| 0.008547
| 0.021368
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9d70dda164cc88745204bc751e73d434c7cae51
| 10,072
|
py
|
Python
|
test.py
|
rqou/yavhdl
|
0dae62b5ff56a48f54da24188ea4d30c64a93d4e
|
[
"BSD-2-Clause"
] | 33
|
2017-03-22T13:52:41.000Z
|
2021-11-09T11:16:46.000Z
|
test.py
|
rqou/yavhdl
|
0dae62b5ff56a48f54da24188ea4d30c64a93d4e
|
[
"BSD-2-Clause"
] | 2
|
2017-03-26T07:53:17.000Z
|
2018-11-19T08:38:27.000Z
|
test.py
|
rqou/yavhdl
|
0dae62b5ff56a48f54da24188ea4d30c64a93d4e
|
[
"BSD-2-Clause"
] | 3
|
2017-03-26T05:15:21.000Z
|
2019-08-28T19:58:21.000Z
|
#!/usr/bin/env python3
import difflib
import json
import os
import os.path
import subprocess
import sys
import traceback
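# Test driver: run ./vhdl_parser (and ./vhdl_analyzer) on every *.vhd file in
# the test directories and diff the JSON output against the matching *.json
# reference; a *.fail file in place of a *.json marks an expected failure.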
# XXX NOTE: THIS CLASS IS EVIL
# The dict must not be mutated after using this
class hack_hashable_dict(dict):
    def __hash__(self):
        return hash(tuple(sorted(self.items())))


def do_set_hack(x):
    if type(x) == dict:
        return hack_hashable_dict({k: do_set_hack(v) for k, v in x.items()})
    elif type(x) == list:
        if len(x) == 0:
            return x
        if x[0] != "__is_a_set":
            return [do_set_hack(x) for x in x]
        # We actually have a set now
        return set((do_set_hack(x) for x in x[1:]))
    else:
        return x


def do_parser_tests():
    print("*" * 80)
    print("Running parser tests...")
    print("*" * 80)

    # Gather tests
    test_files = os.listdir("parser_tests")
    test_files_real = []
    test_files_set = set()
    for f in sorted(test_files):
        name, ext = os.path.basename(f).rsplit(".", 1)
        vhd_name = "parser_tests/" + name + ".vhd"
        json_name = "parser_tests/" + name + ".json"
        fail_name = "parser_tests/" + name + ".fail"
        if (os.path.isfile(vhd_name) and
                (os.path.isfile(json_name) or os.path.isfile(fail_name))):
            if os.path.isfile(json_name):
                this_test = (vhd_name, json_name, name)
            else:
                this_test = (vhd_name, None, name)
            if name not in test_files_set:
                print("Found test \"" + name + "\"")
                test_files_real.append(this_test)
                test_files_set.add(name)
    print("Found " + str(len(test_files_real)) + " tests")

    # Run each test
    failures = False
    for vhd_file, json_file, base_name in test_files_real:
        if json_file:
            print(base_name + ": ", end='')
        else:
            print(base_name + " (expect fail): ", end='')

        # Run parser
        subp = subprocess.run(['./vhdl_parser', vhd_file],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)

        if json_file:
            # Load reference
            with open(json_file, 'r') as inf:
                reference = json.load(inf)

            if subp.returncode != 0:
                failures = True
                print("\x1b[31m✗")
                print("Executing parser failed!\x1b[0m")
                print("\x1b[33m----- stdout -----\x1b[0m")
                sys.stdout.buffer.write(subp.stdout)
                print("\x1b[33m----- stderr -----\x1b[0m")
                sys.stdout.buffer.write(subp.stderr)
                continue

            # Load parser result
            try:
                prog_output = json.loads(subp.stdout.decode('ascii'))
            except Exception as e:
                failures = True
                print("\x1b[31m✗")
                print("Bad parser output!\x1b[0m")
                print("\x1b[33m----- stdout -----\x1b[0m")
                sys.stdout.buffer.write(subp.stdout)
                print("\x1b[33m----- stderr -----\x1b[0m")
                sys.stdout.buffer.write(subp.stderr)
                print("\x1b[33m----- exception -----\x1b[0m")
                print(traceback.format_exc())
                continue

            # Compare
            if prog_output != reference:
                failures = True
                print("\x1b[31m✗")
                print("Test output mismatch!\x1b[0m")
                # Re-encode in sorted order for proper diffing
                ref_json = json.dumps(reference, indent=4,
                                      separators=(',', ': '), sort_keys=True)
                out_json = json.dumps(prog_output, indent=4,
                                      separators=(',', ': '), sort_keys=True)
                print("\x1b[33m----- expected -----\x1b[0m")
                print(ref_json)
                print("\x1b[33m----- actual -----\x1b[0m")
                print(out_json)
                print("\x1b[33m----- diff -----\x1b[0m")
                udiff = difflib.unified_diff(ref_json.split('\n'),
                                             out_json.split('\n'),
                                             fromfile="expected_output",
                                             tofile="parser_output",
                                             lineterm='')
                print('\n'.join(udiff))
            else:
                print("\x1b[32m✓\x1b[0m")
        else:
            # Expect failure
            if subp.returncode == 0:
                failures = True
                print("\x1b[31m✗")
                print("Executing parser succeeded when it should not!\x1b[0m")
                print("\x1b[33m----- stdout -----\x1b[0m")
                sys.stdout.buffer.write(subp.stdout)
                print("\x1b[33m----- stderr -----\x1b[0m")
                sys.stdout.buffer.write(subp.stderr)
                continue
            print("\x1b[32m✓\x1b[0m")

    return failures


# FIXME: Fix copypasta
def do_analyser_json_tests():
    print("*" * 80)
    print("Running analyser JSON tests...")
    print("*" * 80)

    # Gather tests
    test_files = os.listdir("analyser_json_tests")
    test_files_real = []
    test_files_set = set()
    for f in sorted(test_files):
        name, ext = os.path.basename(f).rsplit(".", 1)
        vhd_name = "analyser_json_tests/" + name + ".vhd"
        json_name = "analyser_json_tests/" + name + ".json"
        fail_name = "analyser_json_tests/" + name + ".fail"
        if (os.path.isfile(vhd_name) and
                (os.path.isfile(json_name) or os.path.isfile(fail_name))):
            if os.path.isfile(json_name):
                this_test = (vhd_name, json_name, name)
            else:
                this_test = (vhd_name, None, name)
            if name not in test_files_set:
                print("Found test \"" + name + "\"")
                test_files_real.append(this_test)
                test_files_set.add(name)
    print("Found " + str(len(test_files_real)) + " tests")

    # Run each test
    failures = False
    for vhd_file, json_file, base_name in test_files_real:
        if json_file:
            print(base_name + ": ", end='')
        else:
            print(base_name + " (expect fail): ", end='')

        # Run parser
        subp = subprocess.run(['./vhdl_analyzer', 'worklib', vhd_file],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)

        if json_file:
            # Load reference
            with open(json_file, 'r') as inf:
                reference = json.load(inf)

            if subp.returncode != 0:
                failures = True
                print("\x1b[31m✗")
                print("Executing parser failed!\x1b[0m")
                print("\x1b[33m----- stdout -----\x1b[0m")
                sys.stdout.buffer.write(subp.stdout)
                print("\x1b[33m----- stderr -----\x1b[0m")
                sys.stdout.buffer.write(subp.stderr)
                continue

            # Load parser result
            try:
                last_line = subp.stdout.strip().split(b'\n')[-1]
                prog_output = json.loads(last_line.decode('ascii'))
            except Exception as e:
                failures = True
                print("\x1b[31m✗")
                print("Bad parser output!\x1b[0m")
                print("\x1b[33m----- stdout -----\x1b[0m")
                sys.stdout.buffer.write(subp.stdout)
                print("\x1b[33m----- stderr -----\x1b[0m")
                sys.stdout.buffer.write(subp.stderr)
                print("\x1b[33m----- exception -----\x1b[0m")
                print(traceback.format_exc())
                continue

            # Compare
            if do_set_hack(prog_output) != do_set_hack(reference):
                failures = True
                print("\x1b[31m✗")
                print("Test output mismatch!\x1b[0m")
                # Re-encode in sorted order for proper diffing
                ref_json = json.dumps(reference, indent=4,
                                      separators=(',', ': '), sort_keys=True)
                out_json = json.dumps(prog_output, indent=4,
                                      separators=(',', ': '), sort_keys=True)
                print("\x1b[33m----- expected -----\x1b[0m")
                print(ref_json)
                print("\x1b[33m----- actual -----\x1b[0m")
                print(out_json)
                print("\x1b[33m----- diff -----\x1b[0m")
                udiff = difflib.unified_diff(ref_json.split('\n'),
                                             out_json.split('\n'),
                                             fromfile="expected_output",
                                             tofile="parser_output",
                                             lineterm='')
                print('\n'.join(udiff))
            else:
                print("\x1b[32m✓\x1b[0m")
        else:
            # Expect failure
            if subp.returncode == 0:
                failures = True
                print("\x1b[31m✗")
                print("Executing parser succeeded when it should not!\x1b[0m")
                print("\x1b[33m----- stdout -----\x1b[0m")
                sys.stdout.buffer.write(subp.stdout)
                print("\x1b[33m----- stderr -----\x1b[0m")
                sys.stdout.buffer.write(subp.stderr)
                continue
            print("\x1b[32m✓\x1b[0m")

    return failures


def main():
    # I have been burned too many times by this flakiness, so we first set our
    # CWD to "definitely where this file is" which also must be in the root
    # of the repo
    os.chdir(os.path.dirname(__file__))

    failures = False
    failures = failures or do_parser_tests()
    failures = failures or do_analyser_json_tests()

    if failures:
        print("\x1b[31mThere were test failures!\x1b[0m")
        sys.exit(1)
    else:
        print("\x1b[32mAll tests pass!\x1b[0m")


if __name__ == '__main__':
    main()
| 36.625455
| 78
| 0.487788
| 1,122
| 10,072
| 4.23975
| 0.174688
| 0.057179
| 0.046248
| 0.035316
| 0.803868
| 0.763927
| 0.763927
| 0.763927
| 0.756779
| 0.739542
| 0
| 0.030887
| 0.37639
| 10,072
| 274
| 79
| 36.759124
| 0.724566
| 0.057486
| 0
| 0.780374
| 0
| 0
| 0.170451
| 0
| 0
| 0
| 0
| 0.00365
| 0
| 1
| 0.023364
| false
| 0.004673
| 0.03271
| 0.004673
| 0.098131
| 0.299065
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ddaae8c42f095c5ae9bb1c054f2decd5067c5175
| 10,264
|
py
|
Python
|
unit_tests/services/validation/test_field_validator.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2019-10-03T13:58:29.000Z
|
2019-10-03T13:58:29.000Z
|
unit_tests/services/validation/test_field_validator.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | null | null | null |
unit_tests/services/validation/test_field_validator.py
|
LandRegistry/maintain-frontend
|
d92446a9972ebbcd9a43a7a7444a528aa2f30bf7
|
[
"MIT"
] | 1
|
2021-04-11T05:24:57.000Z
|
2021-04-11T05:24:57.000Z
|
from unittest import TestCase
from maintain_frontend.services.validation.field_validator import FieldValidator
from maintain_frontend.services.validation.validation_error_builder import ValidationErrorBuilder
VALID_PHONE = '01234 567890'
INVALID_PHONE_NON_NUMERIC = 'test'
INVALID_PHONE_DASH = '01234-567890'
NINE_CHARS = '9 chars..'
TEN_CHARS = '10 chars..'
ELEVEN_CHARS = '11 chars...'
OVERWRITE_MESSAGE = 'Test overwrite message'
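# Each test wires a single FieldValidator rule to a fresh
# ValidationErrorBuilder and asserts on the errors it collects.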
class TestFieldValidator(TestCase):

    def test_is_required_adds_error_when_input_empty(self):
        validation_error_builder = ValidationErrorBuilder()
        username = ''
        FieldValidator(username, 'username', 'Username',
                       validation_error_builder) \
            .is_required()
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(validation_errors['username'].summary_message,
                         'Username is required')

    def test_is_required_doesnt_add_error_when_input_provided(self):
        validation_error_builder = ValidationErrorBuilder()
        username = 'TestUser'
        FieldValidator(username, 'username', 'Username',
                       validation_error_builder) \
            .is_required()
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('username' not in validation_errors)

    def test_is_email_adds_error_when_email_invalid(self):
        validation_error_builder = ValidationErrorBuilder()
        email = 'invalid email'
        FieldValidator(email, 'email', email, validation_error_builder) \
            .is_email()
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(validation_errors['email'].summary_message,
                         'invalid email is not a valid email address')

    def test_is_less_than_length_short_value(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(NINE_CHARS, 'username', 'Username',
                       validation_error_builder) \
            .is_length_less_than_or_equal_to(10)
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('username' not in validation_errors)

    def test_is_less_than_length_long_value(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(ELEVEN_CHARS, 'username', 'Username',
                       validation_error_builder) \
            .is_length_less_than_or_equal_to(10)
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('username' in validation_errors)

    def test_is_less_than_length_same_value(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(TEN_CHARS, 'username', 'Username',
                       validation_error_builder) \
            .is_length_less_than_or_equal_to(10)
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('username' not in validation_errors)

    def test_is_length_equal_to_true(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(TEN_CHARS, 'username', 'Username',
                       validation_error_builder) \
            .is_length_equal_to(10)
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('username' not in validation_errors)

    def test_is_length_equal_to_false(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(TEN_CHARS, 'username', 'Username',
                       validation_error_builder) \
            .is_length_equal_to(9)
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('username' in validation_errors)

    def test_is_item_count_equal_to(self):
        list_set = [1, 2]
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(list_set, 'a', 'b',
                       validation_error_builder) \
            .is_item_count_equal_to(2)
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(len(validation_errors), 0)

    def test_is_item_count_equal_to_set_error(self):
        list_set = [1, 2]
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(list_set, 'a', 'b', validation_error_builder) \
            .is_item_count_equal_to(1)
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(len(validation_errors), 1)
        self.assertEqual('Only 1 b can be supplied', validation_errors['a'].summary_message)

    def test_is_phone_number_adds_error_when_phone_invalid_dash(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(INVALID_PHONE_DASH, 'phoneNumber', 'Phone number',
                       validation_error_builder) \
            .is_phone_number()
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(validation_errors['phoneNumber'].summary_message,
                         'Phone number is not a valid phone number')

    def test_is_phone_number_adds_error_when_phone_invalid_non_numeric(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(INVALID_PHONE_NON_NUMERIC, 'phoneNumber', 'Phone number',
                       validation_error_builder) \
            .is_phone_number()
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(validation_errors['phoneNumber'].summary_message,
                         'Phone number is not a valid phone number')

    def test_is_phone_number_doesnt_add_error_when_phone_valid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator(VALID_PHONE, 'phoneNumber', 'Phone number',
                       validation_error_builder) \
            .is_phone_number()
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('phoneNumber' not in validation_errors)

    def test_is_item_in_list_doesnt_add_error_when_valid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("abc", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_item_in_list(["abc"])
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' not in validation_errors)

    def test_is_item_in_list_does_add_error_when_invalid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("def", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_item_in_list(["abc"])
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' in validation_errors)

    def test_is_item_not_in_list_doesnt_add_error_when_valid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("abc", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_item_not_in_list(["def"])
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' not in validation_errors)

    def test_is_item_not_in_list_does_add_error_when_invalid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("def", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_item_not_in_list(["def"])
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' in validation_errors)

    def test_add_error_with_header_message(self):
        validation_error_builder = ValidationErrorBuilder()
        email = 'test'
        header_message = 'This is a header message'
        FieldValidator(email, 'email', 'Email',
                       validation_error_builder, summary_heading_text=header_message) \
            .is_email()
        validation_errors = validation_error_builder.get()
        self.assertEqual(len(validation_errors.errors), 1)
        self.assertEqual(validation_errors.summary_heading_text, header_message)

    def test_add_error_with_inline_message(self):
        validation_error_builder = ValidationErrorBuilder()
        email = 'test'
        inline_message = 'This is an inline message'
        FieldValidator(email, 'email', None,
                       validation_error_builder, inline_message=inline_message) \
            .is_email()
        validation_errors = validation_error_builder.get().errors
        self.assertEqual(validation_errors['email'].inline_message, inline_message)

    def test_is_positive_or_zero_doesnt_add_error_when_valid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("12345", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_positive_number_or_zero()
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' not in validation_errors)

    def test_is_positive_or_zero_does_add_error_when_invalid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("-12345", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_positive_number_or_zero()
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' in validation_errors)

    def test_is_int_doesnt_add_error_when_valid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("12345", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_int()
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' not in validation_errors)

    def test_is_int_does_add_error_when_invalid(self):
        validation_error_builder = ValidationErrorBuilder()
        FieldValidator("123.5", 'testfield', 'Test Field',
                       validation_error_builder) \
            .is_int()
        validation_errors = validation_error_builder.get().errors
        self.assertTrue('testfield' in validation_errors)
| 39.937743
| 97
| 0.690472
| 1,080
| 10,264
| 6.126852
| 0.093519
| 0.158682
| 0.232734
| 0.152939
| 0.861418
| 0.831797
| 0.796736
| 0.743086
| 0.701375
| 0.686263
| 0
| 0.008226
| 0.230125
| 10,264
| 256
| 98
| 40.09375
| 0.829157
| 0
| 0
| 0.627027
| 0
| 0
| 0.08749
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 1
| 0.124324
| false
| 0
| 0.016216
| 0
| 0.145946
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dde71824f17af66623f6e270ffbed95abfc91269
| 148
|
py
|
Python
|
examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_different_io_managers.py
|
kstennettlull/dagster
|
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_different_io_managers.py
|
kstennettlull/dagster
|
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
|
[
"Apache-2.0"
] | null | null | null |
examples/docs_snippets/docs_snippets_tests/concepts_tests/assets_tests/test_asset_different_io_managers.py
|
kstennettlull/dagster
|
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
|
[
"Apache-2.0"
] | 1
|
2019-09-11T03:02:27.000Z
|
2019-09-11T03:02:27.000Z
|
from docs_snippets.concepts.assets.asset_different_io_managers import asset_group
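# Smoke test for the docs snippet: asset_group is expected to contain exactly
# two assets (presumably one per IO manager, going by the module name).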
def test_asset_group():
    assert len(asset_group.assets) == 2
| 24.666667
| 81
| 0.817568
| 22
| 148
| 5.136364
| 0.727273
| 0.265487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007576
| 0.108108
| 148
| 5
| 82
| 29.6
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fb16ae8b98f04fbda64ed2d464d9ae4abbbebe26
| 5,227
|
py
|
Python
|
test/test_k_2048.py
|
ivanlyon/exercises
|
0792976ae2acb85187b26a52812f9ebdd119b5e8
|
[
"MIT"
] | null | null | null |
test/test_k_2048.py
|
ivanlyon/exercises
|
0792976ae2acb85187b26a52812f9ebdd119b5e8
|
[
"MIT"
] | null | null | null |
test/test_k_2048.py
|
ivanlyon/exercises
|
0792976ae2acb85187b26a52812f9ebdd119b5e8
|
[
"MIT"
] | null | null | null |
import io
import unittest
from unittest.mock import patch
from kattis import k_2048
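# Each sample pushes a 4x4 board (four rows of tile values) and a move
# direction (0-3) through a patched stdin, runs k_2048.main(), and compares
# the board written to the patched stdout against the expected grid.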
###############################################################################


class SampleInput(unittest.TestCase):
    '''Problem statement sample inputs and outputs'''

    def test_sample_input_1(self):
        '''Run and assert problem statement sample 1 input and output.'''
        inputs, outputs = [], []
        inputs.append('2 0 0 2')
        inputs.append('4 16 8 2')
        inputs.append('2 64 32 4')
        inputs.append('1024 1024 64 0')
        inputs.append('0')
        inputs = '\n'.join(inputs) + '\n'
        outputs.append('4 0 0 0')
        outputs.append('4 16 8 2')
        outputs.append('2 64 32 4')
        outputs.append('2048 64 0 0')
        outputs = '\n'.join(outputs) + '\n'
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_2048.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

    def test_sample_input_2(self):
        '''Run and assert problem statement sample 2 input and output.'''
        inputs, outputs = [], []
        inputs.append('2 0 0 2')
        inputs.append('4 16 8 2')
        inputs.append('2 64 32 4')
        inputs.append('1024 1024 64 0')
        inputs.append('1')
        inputs = '\n'.join(inputs) + '\n'
        outputs.append('2 16 8 4')
        outputs.append('4 64 32 4')
        outputs.append('2 1024 64 0')
        outputs.append('1024 0 0 0')
        outputs = '\n'.join(outputs) + '\n'
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_2048.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

    def test_sample_input_3(self):
        '''Run and assert problem statement sample 3 input and output.'''
        inputs, outputs = [], []
        inputs.append('2 0 0 2')
        inputs.append('4 16 8 2')
        inputs.append('2 64 32 4')
        inputs.append('1024 1024 64 0')
        inputs.append('2')
        inputs = '\n'.join(inputs) + '\n'
        outputs.append('0 0 0 4')
        outputs.append('4 16 8 2')
        outputs.append('2 64 32 4')
        outputs.append('0 0 2048 64')
        outputs = '\n'.join(outputs) + '\n'
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_2048.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

    def test_sample_input_4(self):
        '''Run and assert problem statement sample 4 input and output.'''
        inputs, outputs = [], []
        inputs.append('2 0 0 2')
        inputs.append('4 16 8 2')
        inputs.append('2 64 32 4')
        inputs.append('1024 1024 64 0')
        inputs.append('3')
        inputs = '\n'.join(inputs) + '\n'
        outputs.append('2 0 0 0')
        outputs.append('4 16 8 0')
        outputs.append('2 64 32 4')
        outputs.append('1024 1024 64 4')
        outputs = '\n'.join(outputs) + '\n'
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_2048.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

    def test_sample_input_5(self):
        '''Run and assert problem statement sample 5 input and output.'''
        inputs, outputs = [], []
        inputs.append('2 2 4 8')
        inputs.append('4 0 4 4')
        inputs.append('16 16 16 16')
        inputs.append('32 16 16 32')
        inputs.append('0')
        inputs = '\n'.join(inputs) + '\n'
        outputs.append('4 4 8 0')
        outputs.append('8 4 0 0')
        outputs.append('32 32 0 0')
        outputs.append('32 32 32 0')
        outputs = '\n'.join(outputs) + '\n'
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_2048.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

    def test_sample_input_6(self):
        '''Run and assert problem statement sample 6 input and output.'''
        inputs, outputs = [], []
        inputs.append('2 2 4 8')
        inputs.append('4 0 4 4')
        inputs.append('16 16 16 16')
        inputs.append('32 16 16 32')
        inputs.append('2')
        inputs = '\n'.join(inputs) + '\n'
        outputs.append('0 4 4 8')
        outputs.append('0 0 4 8')
        outputs.append('0 0 32 32')
        outputs.append('0 32 32 32')
        outputs = '\n'.join(outputs) + '\n'
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_2048.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

###############################################################################

if __name__ == '__main__':
    unittest.main()
| 35.557823
| 79
| 0.539698
| 683
| 5,227
| 4.073206
| 0.079063
| 0.129403
| 0.056075
| 0.025162
| 0.88785
| 0.88138
| 0.8555
| 0.773544
| 0.730769
| 0.730769
| 0
| 0.090471
| 0.28525
| 5,227
| 146
| 80
| 35.80137
| 0.654176
| 0.0771
| 0
| 0.704348
| 0
| 0
| 0.132181
| 0
| 0
| 0
| 0
| 0
| 0.104348
| 1
| 0.052174
| false
| 0
| 0.034783
| 0
| 0.095652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb3d415f447bcbd2a736e8c2d01bb297299a27cc
| 48
|
py
|
Python
|
paymill/utils/__init__.py
|
cruncher/pymill
|
dc4213f75a0581414d063f729edb91f6308dca84
|
[
"MIT"
] | null | null | null |
paymill/utils/__init__.py
|
cruncher/pymill
|
dc4213f75a0581414d063f729edb91f6308dca84
|
[
"MIT"
] | null | null | null |
paymill/utils/__init__.py
|
cruncher/pymill
|
dc4213f75a0581414d063f729edb91f6308dca84
|
[
"MIT"
] | null | null | null |
from . import http_client
from . import pm_error
| 24
| 25
| 0.8125
| 8
| 48
| 4.625
| 0.75
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 48
| 2
| 26
| 24
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3487725801306efec6c8d03e76465afd2a0df37e
| 108
|
py
|
Python
|
aix360/algorithms/shap/__init__.py
|
Qingtian-Zou/AIX360
|
cf25f58077ae002fb4542b680fd98db47758dae5
|
[
"Apache-2.0"
] | 609
|
2019-08-02T17:55:18.000Z
|
2020-07-11T18:11:09.000Z
|
aix360/algorithms/shap/__init__.py
|
Qingtian-Zou/AIX360
|
cf25f58077ae002fb4542b680fd98db47758dae5
|
[
"Apache-2.0"
] | 47
|
2019-08-05T15:00:35.000Z
|
2020-07-13T20:35:57.000Z
|
aix360/algorithms/shap/__init__.py
|
Qingtian-Zou/AIX360
|
cf25f58077ae002fb4542b680fd98db47758dae5
|
[
"Apache-2.0"
] | 147
|
2019-07-12T11:30:31.000Z
|
2020-07-04T19:18:49.000Z
|
from .shap_wrapper import KernelExplainer, GradientExplainer, DeepExplainer, TreeExplainer, LinearExplainer
| 54
| 107
| 0.87963
| 9
| 108
| 10.444444
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 108
| 1
| 108
| 108
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
349593a2940b73d5bf5086b9e86a4f6f10e7a1ce
| 221
|
py
|
Python
|
sputnik/Sputnik.py
|
errord/sputnik
|
b83c635a9a160dcd5809265c0d9d231ade33e5ea
|
[
"BSD-3-Clause"
] | null | null | null |
sputnik/Sputnik.py
|
errord/sputnik
|
b83c635a9a160dcd5809265c0d9d231ade33e5ea
|
[
"BSD-3-Clause"
] | null | null | null |
sputnik/Sputnik.py
|
errord/sputnik
|
b83c635a9a160dcd5809265c0d9d231ade33e5ea
|
[
"BSD-3-Clause"
] | 1
|
2018-03-04T04:48:44.000Z
|
2018-03-04T04:48:44.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2011 msx.com
# by error.d@gmail.com
# 2012-3-12
#
# Sputnik
#
# ToDoList:
#
import SpuLogging
def set_logging_config(logging_config):
    SpuLogging.logging_config = logging_config
| 13
| 46
| 0.705882
| 30
| 221
| 5.033333
| 0.733333
| 0.344371
| 0.264901
| 0.344371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064865
| 0.162896
| 221
| 16
| 47
| 13.8125
| 0.751351
| 0.41629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
550c16153a3efb30e8e40b4728f6eee13698e0e6
| 89
|
py
|
Python
|
src/IArandom.py
|
Couapy/othello-isn
|
6a2d007cbde9c290a345699e31342e9902d5307e
|
[
"MIT"
] | null | null | null |
src/IArandom.py
|
Couapy/othello-isn
|
6a2d007cbde9c290a345699e31342e9902d5307e
|
[
"MIT"
] | null | null | null |
src/IArandom.py
|
Couapy/othello-isn
|
6a2d007cbde9c290a345699e31342e9902d5307e
|
[
"MIT"
] | null | null | null |
from random import *
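# Minimal random player: Jouer ("play") picks one of the legal squares
# (cases_possibles) uniformly at random.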
def Jouer(cases_possibles):
    return choice(cases_possibles)
| 17.8
| 35
| 0.741573
| 11
| 89
| 5.818182
| 0.818182
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191011
| 89
| 4
| 36
| 22.25
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
551ebec74913e76b6c96d0b7b8f5ea952530667a
| 26,424
|
py
|
Python
|
tests/test_FP_loop.py
|
GBillotey/Fractalshades
|
e100b12db031f016bf1a8a1f4fad9ca1c64a0302
|
[
"MIT"
] | null | null | null |
tests/test_FP_loop.py
|
GBillotey/Fractalshades
|
e100b12db031f016bf1a8a1f4fad9ca1c64a0302
|
[
"MIT"
] | 1
|
2021-11-01T14:55:57.000Z
|
2021-11-01T14:55:57.000Z
|
tests/test_FP_loop.py
|
GBillotey/Fractalshades
|
e100b12db031f016bf1a8a1f4fad9ca1c64a0302
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import unittest
import shutil
import numpy as np
import numba
import mpmath
import fractalshades as fs
import fractalshades.utils as fsutils
import fractalshades.colors as fscolors
import fractalshades.models as fsm
import fractalshades.numpy_utils.xrange as fsx
import fractalshades.numpy_utils.numba_xr as fsxn
# import fractalshades.bivar_series
from fractalshades.postproc import (
Postproc_batch,
Raw_pp,
Attr_normal_pp,
Attr_pp,
Fractal_array
)
import test_config
from fractalshades.colors.layers import (
Color_layer,
Bool_layer,
Normal_map_layer,
Blinn_lighting
)
import fractalshades.mpmath_utils.FP_loop as fsFP
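# Tests for the arbitrary-precision reference-orbit machinery: numba access
# to the reference path, the Cython ball-method / Newton nucleus search for
# the Mandelbrot set, and the corresponding BS (Burning-Ship-type) routines.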
Xr_template = fsx.Xrange_array.zeros([1], dtype=np.complex128)


@numba.njit
def numba_path_loop(
    ref_path, has_xr, ref_index_xr, ref_xr, ref_div_iter, drift_xr, dx_xr
):
    npts = ref_path.size
    out_is_xr = np.zeros((1,), dtype=numba.bool_)
    out_xr = Xr_template.repeat(1)
    refpath_ptr = np.zeros((2,), dtype=np.int32)
    print("npts", npts, has_xr, "\n ref index:\n", ref_index_xr)

    count = 0
    for j in range(npts):
        for i in (j, j, npts - j - 1):
            val = fs.perturbation.ref_path_get(
                ref_path, i, has_xr, ref_index_xr, ref_xr, refpath_ptr,
                out_is_xr, out_xr, 0
            )
            if out_is_xr[0]:
                print("is_xr", i, val, fsxn.to_Xrange_scalar(out_xr[0]))
                count += 1
    print("count xr", count)
    if count != 14 * 3:
        print("count", count)
        raise ValueError("Unexpected count")

    count = 0
    for i in (7433792, 7433795, 8785472, 8785473, 9461312, 675720, 675728):
        #      Y        N        Y        N        Y        N       Y
        val = fs.perturbation.ref_path_get(
            ref_path, i, has_xr, ref_index_xr, ref_xr, refpath_ptr,
            out_is_xr, out_xr, 0
        )
        if out_is_xr[0]:
            print("is_xr", i, val, fsxn.to_Xrange_scalar(out_xr[0]))
            count += 1
        else:
            print("** NOT xr", i, val)
    if count != 4:
        print("count", count)
        raise ValueError("Unexpected count")


@numba.njit
def numba_c_from_pix(path, pix):
    return path.c_from_pix(pix)


class Test_ref_path(unittest.TestCase):

    @classmethod
    # @test_config.no_stdout
    def setUpClass(cls):
        fs.settings.enable_multiprocessing = True
        fs.settings.no_newton = True
        fs.settings.inspect_calc = True

        precision = 3520
        nx = 800
x = '-1.9409989391128007782656638595713128206620929316331395903205283705275932149841553750079140508152501109445961064000387852149507811657094626324996392008081820445955741490587617909708619603737265548027769325647808985287741667276189821676033432683374240723052323372896622554689290278821522432095519048328761094875168059910075072612524746195696519482376711787954155676296696827707057348137590781477540653443160271404114741216279924299516050033371623738987930710049260335938454436747992050897445704854917586460267198917634232454874517524790905068408711299098852857223323363509317448492707948571935557902448516804312250656708860690680767226144394692148838449346680921087412029850014210409147937112323614271639154365986968749816836442985665512979922489943829925482859841402388822224364772960765860128299173467963835512792813373451933644130190266047607001031626499249499592567711348988794983423352102489653363614657987130851011066068082416311059571884201802812522326939248656260215898332770887339844184688424916821959905805787211079924762420560654209080231130357236288188593275206143270109163936044056855567309338390204460230556526667618113052517191169646813610992208066490740332700166077086244561644939752386971282938070707062898838928187674154565542324706485606883204149973662143729325062503353762046809254607154103878222668282005954040495000651634097511941293052468376780564225465557438420172736278899353415715205080501056910932380856513690069593717239355697113322999606963893343303065997244593517188694362601778555657829079220370979486386183376634551544169026880446433151630826730127399985709844229666877539084763034446297595098204169627029966553348731711298433915468877133916519870332995252770006087468201433091412692008675169426600509762262849033820684824479730400854046509072164630272105114166613615665383021053646289448207336461725630828678598527683609575006544933462912457658924436663804582292428129309162915840098216747977268766925226272677267826315722555021136934491464926926641085339160830952887601459585519624489323898936587933143756193630971066578717659019875144049965572880866540996031144922280813352065159362962936897218127976473669535727210317367178865163942427120257230318803642220091013441782124465936161868040076934432584798273802125003893761405910549636791922164569969871504895180875775512279622397659490539731258965222183682582044022842758452337516752189727551206382556078493830490372988205049395299138260871313038171904760429268109644267193074206275040851482988811238053209498575928806745490180665861235757156293268030156174736154214485511919238045324816790747039434094153238651378208655247035749519428374239948111490578363711926298127059816373882058600875440218265729937727712935557101248183859985480838214443248343204994169001603385068217409551664275124868238925925271002064990910751541295196946319404974130124223074815816387748372081603618046256402766723419509314015491326315372861880224396707850752490829513864536227468094212074909783507683557390914984737208904927522859784984066452431380596052384391155762961147112917902257288838205513568126100751182438074841839964967562205987620459771593676482435160564881907643374624834394770129519338651384779340621276744712596399177749754956987947612707663018919330037816063293842647052555147743226921275393227281792532802856285703297338604821969492356674112869979073125870095512233460880231177088317720580337642382172126187069216048936896730950168087435988621276438670059341103609929304930466412268150569753470717829497601938341623581803667066999928999945000062'
y = '-0.0006521165369165588520106289441620153907907521525225557951700039268755659160275378414816331241993503713942651869474366440330624054932785747734116130598457275168672169867853790149073948820621927863546898987531675745541556010963860271946131945706089440068213570737152573434606181998626256475661137064241766615685133034114571184540746713081041577482152866404680905298142203271097108866125320734562827910017740404764291477614758081664091324083106696109319507742512146578699926177581123430550120851818916049981949393089874937840577370413575565615246397463453690404270526656455145637869566754373564864548747775061651693403960187403612827482714675143082173905414385810506804378880397100996175280822311114495867725750471436402145707242763362689139153766093202506743259707579782531683072699910204376229255257696447791057044885184061849070063540925613028401048182129422816270970456315092465855569329878796473503666036123284601909076758201573065328180211040459230345709044071756847669905912521106047214804555579992552727318466143562534207465701332898411609149336015158023746864705973770293526683875460324480616782478489019514943512702395590818455582259983339029054638765126731537575594335734368117123722683120375030995584809981966023016675121788001130361752945926045051983789243281329028107416493849599211739205918880442308088915329310667744587253842928202077978689211781621700292204988439971992046135099101850443216579189710924423016693808479474589682525790322932538431715348758724089186172736870724706725359784401019519888555644853285575115223472590818823322033130852641478536530503881747200363162574382337579455223211205019832848615171631087121056343365803496414693646695845027511119821045191586941544022389773784151557473277272394880876628653639136977979073123486169650096416150642999247909147333278062324113459547152270378118487801961875006181455991513879900323624590458328414797373565255061007383050772917420374420930369627261609756033085579925058681478773760867701230719359928389502388023578804808713069253869301107296738982313988108484002367456921622985540672687977893371677916030176767500564905285025226973308704535270965189005321129735333599100313629076978281635241128387571784303118677495016595486491171040002394480779899042204488631259847989603182340726213078367178896618081990169319498713349339065257474424401748553283927933449943175175157120972516636257833849555669271463331231601029167028638597915809746995436188809835668111701784052366810307436108276491541042658178481843136392746657892940367221519240125914939061964441432380740020708127640600546604568699045234845708728863090863984386209155813013615576381026653379878624402126265227089167061378994809588030662831377110537145242600584959148498586439529663105983709419546957848439948376427305067215182145348517650481959560955434577158090652441197554228656503253796471623707876797570793456353888545895776536724341010890647565137237971578364800606022054805371016117249815862385204930532791360055457643453800167233033393824944921504096748637258867979270585206447548364249344195079436376739232814985700753366335710763351616828921383429188346008648781525793755795069682228036514982477038907976343304196109685257025904974333612600761354191140826329760186432247441069680365217200145218033541210372615053282512008534408785235009976598833958899392833195540809260984815364215770028371283427130718815533338521166040923413722562752702386025562655776477893889452984598715385588865771230862335806477085969230662862126372402082027768431991530300520064005268033000000000000000000'
        dx = '2.e-2608'
        # complex_type = np.complex128

        subset_dir = os.path.join(
            test_config.temporary_data_dir,
            "_numba_ref_path_dir"
        )
        fsutils.mkdir_p(subset_dir)
        cls.subset_dir = subset_dir
        cls.calc_name = "test"
        # cls.dir_ref = os.path.join(test_config.ref_data_dir, "subset_REF")
        cls.f = f = fsm.Perturbation_mandelbrot(subset_dir)
        f.zoom(precision=precision, x=x, y=y, dx=dx, nx=nx, xy_ratio=1.0,
               theta_deg=0., projection="cartesian", antialiasing=False)
        f.calc_std_div(
            # datatype=complex_type,
            calc_name=cls.calc_name,
            subset=None,
            max_iter=10000000,
            M_divergence=1.e3,
            epsilon_stationnary=1.e-3,
            interior_detect=False,
            SA_params={"cutdeg": 2, "eps": 1.e-8},  # 7886 : for 7884 partial
            calc_dzndc=False)
        print("f.iref", f.iref)
        f.iref = 0
        print("################# before get_FP_orbit")
        f.get_FP_orbit()
        print("################# after get_FP_orbit")
        cls.FP_params = f.FP_params
        cls.ref_path = f.Zn_path

    def test_numba_path(self):
        """
        14 xr indices
        [ 675728 1351568 2027296 2703248 3378976 4054816 4730544 5406496 6082224
          6758064 7433792 8109744 8785472 9461312]"""
        # ref_path = self.f.get_Ref_path()
        (ref_path, has_xr, ref_index_xr, ref_xr, ref_div_iter, ref_order, drift_xr, dx_xr
         ) = self.f.get_path_data()
        print("ref_path", ref_path)
        numba_path_loop(
            ref_path, has_xr, ref_index_xr, ref_xr, ref_div_iter, drift_xr, dx_xr
        )

    def test_print(self):
        fs.perturbation.PerturbationFractal.print_FP(
            self.FP_params, self.ref_path
        )


class Test_newton(unittest.TestCase):

    def test_ball_method(self):
        """
        Test Cython ball method
        """
        ball_order = {
"x": "-1.38489865821023436791757551552306535886843948840412919252407990736888673896107",
"y": "0.0227499123767246576841168164274146683393042605219762097416661295912819114324773",
"maxiter": 100000,
"radius_pixels": 3,
"radius": "1.253026641642179625e-70",
"M_divergence": 1000.0,
"order": 4252
}
x = ball_order["x"]
y = ball_order["y"]
r0 = ball_order["radius"]
precision = int(78 * 3.33) # precision in bits
order = fsFP.perturbation_mandelbrot_ball_method(
str(x).encode('utf8'),
str(y).encode('utf8'),
precision,
str(r0).encode('utf8'),
ball_order["maxiter"],
ball_order["M_divergence"]
)
print("order", order)
self.assertEqual(order, ball_order["order"])
def test_str_out(self):
x = "-1.50000000000000000000000000000000000000000000000000000000000001"
        precision = int(len(x) * 3.33)
        mpmath.mp.prec = precision
        ret = fsFP._test_mpfr_to_python(
            str(x).encode('utf8'),
            precision
        )
        is_equal = mpmath.almosteq(mpmath.mpf(x) * 2., ret)
        print("ret", ret, type(ret))
        print("ret * 2", ret * 2)
        print(float(ret))
        self.assertTrue(is_equal)
y = "-1.50000000000000000000000000000000000000000000000000000000000001"
        precision = int(len(x) * 3.33)
        ret = fsFP._test_mpc_to_python(
            str(x).encode('utf8'),
            str(y).encode('utf8'),
            precision
        )
        print(ret)
        print(dir(ret))
        print(ret.imag, type(ret.imag))
        print(float(ret.imag))
        # a = mpmath.mpc(
        #     mpmath.mpf(ret.real.as_mantissa_exp()),
        #     mpmath.mpf(ret.imag.as_mantissa_exp())
        # )
        is_equal = mpmath.almosteq(mpmath.mpf(x) * (1. + 1j), ret)
        print(is_equal)
        self.assertTrue(is_equal)

    def test_newton(self):
        """
        Test Cython newton method
        """
        newton_search = {
"x_start": "-1.38489865821023436791757551552306535886843948840412919252407990736888673946771734",
# -1.384898658210234367917575515523065358868439488404129192524079907368886731071071
"y_start": "0.0227499123767246576841168164274146683393042605219762097416661295912819134778267851",
"order": 4252,
# -1.384898658210234367917575515523065358868439488404129192524079907368886731071071"
"x_nucleus": "-1.384898658210234367917575515523065358868439488404129192524079907368886731071070290597737327432208840677453517173286448408866226044469621428477672357862724621488179028111732996542591281209148762118889234892037810796912513242881089802259331283320590242459763937836417161921527945857754559159319285383388065105320462521737268935854537254334081892499119184659981831545828333620648594267400219876222339466444419570130912837345844427308581414807326890483942795318275203584063292725367898743833997728784162826193628251523069755931013368411822190869321433789564544441585133097038489340740401348693780401645998731914380562063495469379430034192833658874660407862857944557262450884949376176485242263883332366742107603556473032701375817056416109169174003851015583671029376503919964379185805386973459627223242717548185397791661030283940357630242077950828221222761345551013939547504363904891452248134959442967751256635420953269361209693142594751827252276311622211161303338713979387498146022555169458560196959431871292976784477566279809987430287392674863384462176841792217105111645585061149478516056187429670357540451584091730367671802224669203260354259074441057400338852271137370597619479063946211724981190636722057148412523719407928857594000998351741891319181825479539120938921559113929429837241097240999763366672555498654838371701089925378492586384604575774588053016794521112694182187419806040838810594490140975831449433315007137466504477415629759911137969660764255628152461579806142566466449124890768241003530265410721759548010502473087774944305621076745345290581374121843593772783992094679266476532974848358115409883409689238746613390059112346135143734842796331105408303889560627944284297643019691733262427261916197896130042069856228805916942520541952967086438748232278951326182431433894528986942084948117111513036920627128726094889130088748930129403425796896801983706618026928855677717907574861885240438957301376359009785124937415946628171498544918239692049650684317929521661550684735160900072378209293274343835975392116213583081848096758294069277767163769152819596241698026068104100443690368498641842969647166021531796621557668344692626249974606083378018842624731144995170567814145619634650463990257153200014705543982893973757842416417820304612581777566216837658169909814169440092746576335835098963395705271146613845201996459979302162346673064003186518770906488677117370116939332474769708384422290588262596274070150009629962935250095760660371483134194932278960224049065837640766189638531338144873811957391221091189356404060868126759178113102134453760794308221409826015639123643227761785779462100892435413298488572582041971557284512888540575326096827707762308681757124108440265214018015454332578708469255029949216987491131310454344950705159583116745438937285217159110821755079801011043089055951101274140132218431575020160520464933779172769220322899291896495798852628445100151776277336891147664053100229545550893340495566461430485847441553687227850322369201543764030791477427743602367226508284650107332830870941688118261829055293684480892916096509513257172569369773483638202147216251906820294188440494826073983239414485919996068532801114187089293959470994399531875598400639481899592705652612995793891273687001796161288375352871412483742568114114333811883789177126487510484415032705474559228643152165499519515065933939609956399298992509076827007723345792984020456899748496005006747798330097835898418452660482943473941789529554687641528914237042847875897579678796304046953546419858883298767417601641900931960342240020493789122651064",
"y_nucleus": "0.02274991237672465768411681642741466833930426052197620974166612959128193946599148663186825181400444801617902649746806986512836765355177903929141487291683110650978461712462524687126133894875596060308681361104833937528120348545366959122931144861989474428824041235640513711128271395006129931760926892841430434944360116868670528335319769241973236989333953830700644975630936980081530182606798363910233428393712610721672702307367478384840762969935947898265290497971257548401454342622724819054947703466325891518996898444847755836312159070427425530522626305221612707628912420367401353915076925646997332810781726997188872553836780195943610730461291000447968626131265356928903193789956336871991855398664589981778911852346698606110912746334808586202348059280620487697328192294599478384339499955964505585016647296172942826891221285671615887805243446787277399610483064104718775312209863851691540079419951662873752956844077804483606220177402951076090919048548882834980184087105282050028584606602280161455941010223184600649246559311064438481399357643787014775659058517659844777792673179277016030869941750791665874059099872067480342929268437753109606495110846803946033879691682368471370782381873518785492205170508743931724987766853562421777707611931574109403499438780979579145822038475927284789183803331626623203423328865605605714022837667637217986098934905700572834627663035072349710098142630056586758504392030909497301367626010390038849757543029936188135957260231103066541685149275531948779898210446009843622801340293733204034917653186732997729872409542920840578952875280173036440726688634383226906795479812112850408574421645735398524467540399629397951897284902011848690229677440280928964895140060438241750659537420047862387843078311544680447481063179572104824044132364190550559402772107342368169119639746361790000212704083306665845759989019932885593184934678464280631470852709516522194744801017054738958179981093671269759564160133507409509794667835185910813779922915851775931689297034580502259010325584871138943616102271820201450109434558108214463968881695570510623250163633895206410719849984215787324622253700200123257094829690065737926302030563039471262994128923247934489520076295937940715125500589390271463847802808716916054940726125865174867319944467953622748681213861887922154541729800764344421696151229219579308114931121163670891657190289916657590109284793324632455907281806538817932094507269636801746632242063636456132003769269606371214308660473980268455976527592186575377374400877440857097563032203825886708721992762579522190196819458675618729177099950191480595760045869448677154498032710518356116184402530957106267612384132671925147775943068806550989509319014324066642386918837372440528786995664105806947281219784909324164428213864372668955128776320531547273003793924255816758008153517203649953490669231338984964677497707773706731377133321320709438739797078857519267931064685463095366122293841160283549330426185711473729449237131851762471331548604120200999897025646626746384166206359292874381760858754652216027816952480190791698144851155013112459358463168100209942581466058418656370690811294322094354350871386054984191414163005245993506775113207165765364938298440142673518637655755085818994789733903416061759698648770332901147863373593785422162004965272876477501550673032814150382610403569366848966477835395924891081465661604955885574229809273601533307483102106387977778081572012569967160272697015243920849565452956090988298448875399315697690578185407249503667923686545839968981499842959967983934762594249424971212143006331771",
        }
        x = newton_search["x_start"]
        y = newton_search["y_start"]
        precision = int(3522 * 3.3)  # precision in bits
        pix = int(84 * 3.3)  # precision in bits
        mpmath.mp.prec = precision
        print("prec", precision)
        # eps_cv = mpmath.mpf(2.)**(-mpmath.mp.prec) is a good first estimate
        eps = mpmath.mpf(val=(2, -precision))
        eps_pix = mpmath.mpf(val=(2, -pix))
        print("eps", eps)
        is_ok, val = fsFP.perturbation_mandelbrot_find_nucleus(
            x.encode('utf8'),
            y.encode('utf8'),
            precision,
            newton_search["order"],
            80,
            str(eps).encode('utf8'),
            str(eps_pix).encode('utf8'),
        )
        with mpmath.workprec(precision - 10):
            print(val)
            expected = mpmath.mpf(newton_search["x_nucleus"])
            print("**expected", expected.real)
            print("**got", is_ok, val.real)
            is_equal = mpmath.almosteq(expected.real, val.real)
            self.assertTrue(is_equal)


class Test_BS(unittest.TestCase):

    def test_path(self):
        x = 0.1
        y = 0.1
        c = x + 1j * y
        npts = 10
        orbit_complex = np.empty((npts,), dtype=np.complex128)
        orbit_float = orbit_complex.view(np.float64)
        (i, partial_dict, xr_dict
         ) = fsFP.perturbation_BS_FP_loop(
            orbit_float,
            False,
            npts - 1,
            1000.,
            str(x).encode('utf8'),
            str(y).encode('utf8'),
            53
        )
        print("orbit_complex", orbit_complex)
        print("i", i)

        z = complex(0.)
        for i in range(10):
            assert orbit_complex[i] == z
            x = z.real
            y = z.imag
            new_x = x ** 2 - y ** 2 + c.real
            new_y = 2 * np.abs(x * y) - c.imag
            z = new_x + 1j * new_y
            # print("new_z", i + 1, z)

    def test_order(self):
        x_str = "-1.7545128115395"
        y_str = "0.0015894811966473"
        seed_px = "0.01"
        order = fsFP.perturbation_BS_ball_method(
            x_str.encode('utf8'),
            y_str.encode('utf8'),
            53,
            seed_px.encode('utf8'),
            100,
            1000.
        )
        print("order", order)
        assert order == 3

        x_str = "0.88410156557344"
        y_str = "1.5218981991448"
        seed_px = "0.01"
        order = fsFP.perturbation_BS_ball_method(
            x_str.encode('utf8'),
            y_str.encode('utf8'),
            53,
            seed_px.encode('utf8'),
            100,
            1000.
        )
        assert order == 3

    def test_newton(self):
        x_str = "-1.7545128115395"
        y_str = "0.0015894811966473"
        seed_px = "0.01"
        seed_prec = 53
        order = 3
        eps_cv = mpmath.mpf(val=(2, -seed_prec))
        is_ok, val = fsFP.perturbation_BS_find_any_nucleus(
            x_str.encode('utf8'),
            y_str.encode('utf8'),
            seed_prec,
            order,
            40,
            str(eps_cv).encode('utf8'),
            seed_px.encode('utf8'),
        )
        assert order == 3
        print(is_ok, val)

        x_str = "0.88410156557344"
        y_str = "1.5218981991448"
        seed_px = "0.012"
        seed_prec = 53
        order = 3
        eps_cv = mpmath.mpf(val=(2, -seed_prec))
        is_ok, val = fsFP.perturbation_BS_find_any_nucleus(
            x_str.encode('utf8'),
            y_str.encode('utf8'),
            seed_prec,
            order,
            40,
            str(eps_cv).encode('utf8'),
            seed_px.encode('utf8'),
        )
        assert order == 3
        print(is_ok, val)

        # x_str = "-1.7545128115395"
        # y_str = "0.0015894811966473"
        # seed_px = "0.001"
        #
        # order = fsFP.perturbation_BS_ball_method(
        #     x_str.encode('utf8'),
        #     y_str.encode('utf8'),
        #     53,
        #     seed_px.encode('utf8'),
        #     100,
        #     1000.
        # )
        # assert order == 3


if __name__ == "__main__":
    full_test = False
    runner = unittest.TextTestRunner(verbosity=2)
    if full_test:
        runner.run(test_config.suite([Test_ref_path,
                                      Test_newton]))
    else:
        suite = unittest.TestSuite()
        # suite.addTest(Test_bivar_SA("test_basic"))
        ## suite.addTest(Test_bivar_SA("test_SA"))
        # suite.addTest(Test_bivar_SA("test_bivar_SA"))
        # suite.addTest(Test_newton("test_ball_method"))
        # suite.addTest(Test_newton("test_str_out"))
        # suite.addTest(Test_newton("test_newton"))
        # suite.addTest(Test_ref_path("test_print"))
        # suite.addTest(Test_bivar_SA("test_bivar_SA"))
        suite.addTest(Test_BS("test_path"))
        suite.addTest(Test_BS("test_order"))
        suite.addTest(Test_BS("test_newton"))
        runner.run(suite)
| 64.764706
| 3,551
| 0.791137
| 1,466
| 26,424
| 14.020464
| 0.193042
| 0.014109
| 0.008563
| 0.003162
| 0.103386
| 0.09132
| 0.087234
| 0.079838
| 0.067189
| 0.063589
| 0
| 0.681755
| 0.153913
| 26,424
| 407
| 3,552
| 64.923833
| 0.237599
| 0.064411
| 0
| 0.327815
| 0
| 0
| 0.620956
| 0.592058
| 0
| 1
| 0
| 0
| 0.029801
| 1
| 0.036424
| false
| 0
| 0.05298
| 0.003311
| 0.102649
| 0.10596
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b44a66aa1f5403cc83f43901a44841b471d88c4 | 82,996 | py | Python | tests/test_cases.py | Lucid-Lynx/FileServerApp | 3823381525d2a08d5bbffedb746acec830b6d67f | ["FSFAP"] | 3 | 2019-11-27T10:04:42.000Z | 2022-02-01T06:51:27.000Z | tests/test_cases.py | Lucid-Lynx/FileServerApp | 3823381525d2a08d5bbffedb746acec830b6d67f | ["FSFAP"] | null | null | null | tests/test_cases.py | Lucid-Lynx/FileServerApp | 3823381525d2a08d5bbffedb746acec830b6d67f | ["FSFAP"] | 1 | 2021-03-12T11:16:02.000Z | 2021-03-12T11:16:02.000Z |
# Copyright 2019 by Kirill Kanin.
# All rights reserved.
import os
import pytest
import json
import logging
import server.utils.utils as utils
from aiohttp import web
from server.web.handler import Handler
from server.db.database import DataBase
from server.crypto.crypto import HashAPI, AESCipher, RSACipher
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
extension = 'txt'
test_folder = '../test_files_1'
test_file_1 = 'test1_low.txt'
test_file_2 = 'test2_test.txt'
test_file_3 = 'test3.txt'
test_file_4 = 'test4_low.txt'
test_signature_file_4 = 'test4_low.md5'
test_file_5 = 'test5_high.txt'
test_signature_file_5 = 'test5_high.md5'
test_file_6 = 'test6_medium.txt'
test_signature_file_6 = 'test6_medium.md5'
test_file_7 = 'test7_low.txt'
test_signature_file_7 = 'test7_low.md5'
test_file_8 = 'test8_low.txt'
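# File name suffixes encode the security level the handler is expected to
# apply (*_low, *_medium, *_high); the .md5 files hold their signatures.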
test_content = 'Test content\n'
def create_and_move_to_test_folder():
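    # Despite the name, this helper only ensures the folder exists; it never chdirs.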
if not os.path.exists(test_folder):
os.mkdir(test_folder)
def create_test_files():
    # Write the shared test content into each plain (unencrypted) test file.
    for test_file in (test_file_1, test_file_2, test_file_3, test_file_4, test_file_7):
        with open(f'{test_folder}/{test_file}', 'wb') as file_handler:
            file_handler.write(bytes(test_content, 'utf-8'))
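# aiohttp test client with every Handler route registered; returns the client
# together with the handler instance.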
@pytest.fixture
def client(loop, aiohttp_client):
create_and_move_to_test_folder()
create_test_files()
handler = Handler(test_folder)
app = web.Application()
app.router.add_get('/', handler.handle)
app.router.add_get('/files/list', handler.get_files)
app.router.add_get('/files', handler.get_file_info)
app.router.add_post('/files', handler.create_file)
app.router.add_delete('/files/{filename}', handler.delete_file)
app.router.add_get('/files/download', handler.download_file)
app.router.add_get('/files/download/queued', handler.download_file_queued)
app.router.add_post('/signup', handler.signup)
app.router.add_post('/signin', handler.signin)
app.router.add_get('/logout', handler.logout)
app.router.add_put('/method/{method_name}', handler.add_method)
app.router.add_delete('/method/{method_name}', handler.delete_method)
app.router.add_put('/role/{role_name}', handler.add_role)
app.router.add_delete('/role/{role_name}', handler.delete_role)
app.router.add_post('/add_method_to_role', handler.add_method_to_role)
app.router.add_post('/delete_method_from_role', handler.delete_method_from_role)
app.router.add_post('/change_shared_prop', handler.change_shared_prop)
app.router.add_post('/change_user_role', handler.change_user_role)
app.router.add_post('/change_file_dir', handler.change_file_dir)
return loop.run_until_complete(aiohttp_client(app)), handler
@pytest.fixture(scope='function')
def prepare_data(request):
logger.info('Prepare test data in database')
db = DataBase()
db_session = db.create_session()
testing_methods = db_session.query(db.Method).filter(db.Method.name.in_([
'get_files', 'get_file_info', 'get_file_info_signed', 'create_file', 'delete_file', 'download_file',
'download_file_queued', 'add_method', 'delete_method', 'add_role', 'delete_role', 'add_method_to_role',
'delete_method_from_role', 'change_shared_prop', 'change_user_role', 'change_file_dir'])).all()
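    # One extra method plus three roles: test_role_1 gets no methods (access
    # denied), test_role_2 gets them all (access allowed), test_role_3 stays
    # unattached to any user.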
test_method = db.Method('test_method_1')
testing_methods.append(test_method)
test_role_denied = db.Role('test_role_1')
test_role_allowed = db.Role('test_role_2', methods=testing_methods)
test_role_no_user = db.Role('test_role_3')
session_denied = db.Session(
db.User('user1@test.su', HashAPI.hash_sha512('1test1234'), 'User1', role=test_role_denied))
session_allowed = db.Session(
db.User('user2@test.su', HashAPI.hash_sha512('2test1234'), 'User2', role=test_role_allowed))
session_no_role = db.Session(
db.User('user3@test.su', HashAPI.hash_sha512('3test1234'), 'User3'))
user_without_session = db.User('user4@test.su', HashAPI.hash_sha512('4test1234'), 'User4')
db_session.add_all([session_denied, session_allowed, session_no_role, test_role_no_user, user_without_session])
db_session.commit()
user = db_session.query(db.User).filter_by(email='user2@test.su').first()
full_test_file_4 = f'{test_folder}/{test_file_4}'
file_dict_4 = {
'name': test_file_4,
'create_date': utils.convert_date(os.path.getctime(full_test_file_4)),
'size': os.path.getsize(full_test_file_4),
'content': test_content,
'user_id': user.id,
}
full_test_signature_file_4 = f'{test_folder}/{test_signature_file_4}'
    signature = HashAPI.hash_md5('_'.join(str(x) for x in file_dict_4.values()))
with open(full_test_signature_file_4, 'wb') as file_handler:
data = bytes(signature, 'utf-8')
file_handler.write(data)
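    # test_file_5 is written through RSACipher, matching its *_high suffix.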
cipher = RSACipher(user.id, test_folder)
full_test_file_5 = f'{test_folder}/{test_file_5}'
with open(full_test_file_5, 'wb') as file_handler:
data = bytes(test_content, 'utf-8')
cipher.write_cipher_text(data, file_handler, test_file_5.split('.')[0])
file_dict = {
'name': test_file_5,
'create_date': utils.convert_date(os.path.getctime(full_test_file_5)),
'size': os.path.getsize(full_test_file_5),
'content': test_content,
'user_id': user.id,
}
full_test_signature_file_5 = f'{test_folder}/{test_signature_file_5}'
    signature = HashAPI.hash_md5('_'.join(str(x) for x in file_dict.values()))
with open(full_test_signature_file_5, 'wb') as file_handler:
data = bytes(signature, 'utf-8')
file_handler.write(data)
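    # test_file_6 is written through AESCipher, matching its *_medium suffix.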
cipher = AESCipher(user.id, test_folder)
full_test_file_6 = f'{test_folder}/{test_file_6}'
with open(full_test_file_6, 'wb') as file_handler:
data = bytes(test_content, 'utf-8')
cipher.write_cipher_text(data, file_handler, test_file_6.split('.')[0])
file_dict = {
'name': test_file_6,
'create_date': utils.convert_date(os.path.getctime(full_test_file_6)),
'size': os.path.getsize(full_test_file_6),
'content': test_content,
'user_id': user.id,
}
full_test_signature_file_6 = f'{test_folder}/{test_signature_file_6}'
    signature = HashAPI.hash_md5('_'.join(str(x) for x in file_dict.values()))
with open(full_test_signature_file_6, 'wb') as file_handler:
data = bytes(signature, 'utf-8')
file_handler.write(data)
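    # test_file_7's signature is computed over 'Test' instead of the file's
    # real content, so signature verification for it must fail.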
full_test_file_7 = f'{test_folder}/{test_file_7}'
file_dict_7 = {
'name': test_file_7,
'create_date': utils.convert_date(os.path.getctime(full_test_file_7)),
'size': os.path.getsize(full_test_file_7),
'content': 'Test',
'user_id': user.id,
}
full_test_signature_file_7 = f'{test_folder}/{test_signature_file_7}'
    signature = HashAPI.hash_md5('_'.join(str(x) for x in file_dict_7.values()))
with open(full_test_signature_file_7, 'wb') as file_handler:
data = bytes(signature, 'utf-8')
file_handler.write(data)
request.addfinalizer(teardown)
yield session_denied, session_allowed, session_no_role
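# Module-level finalizer: removes every user, role, method and method-role
# link the tests may have created, in dependency order.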
def teardown():
logger.info('Clean test data in database')
db = DataBase()
db_session = db.create_session()
test_user_no_role = db_session.query(db.User).filter_by(email='user3@test.su').first()
test_user_without_session = db_session.query(db.User).filter_by(email='user4@test.su').first()
test_user = db_session.query(db.User).filter_by(email='user5@test.su').first()
test_role_denied = db_session.query(db.Role).filter_by(name='test_role_1').first()
test_role_allowed = db_session.query(db.Role).filter_by(name='test_role_2').first()
test_role_no_user = db_session.query(db.Role).filter_by(name='test_role_3').first()
test_role = db_session.query(db.Role).filter_by(name='test_role_4').first()
test_method_1 = db_session.query(db.Method).filter_by(name='test_method_1').first()
test_method_2 = db_session.query(db.Method).filter_by(name='test_method_2').first()
if test_user_no_role:
db_session.delete(test_user_no_role)
if test_user_without_session:
db_session.delete(test_user_without_session)
if test_user:
db_session.delete(test_user)
if test_role_denied:
db_session.query(db.MethodRole).filter_by(role_id=test_role_denied.id).delete()
db_session.delete(test_role_denied)
if test_role_allowed:
db_session.query(db.MethodRole).filter_by(role_id=test_role_allowed.id).delete()
db_session.delete(test_role_allowed)
if test_role_no_user:
db_session.delete(test_role_no_user)
if test_role:
db_session.delete(test_role)
if test_method_1:
db_session.query(db.MethodRole).filter_by(method_id=test_method_1.id).delete()
db_session.delete(test_method_1)
if test_method_2:
db_session.query(db.MethodRole).filter_by(method_id=test_method_2.id).delete()
db_session.delete(test_method_2)
db_session.commit()
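# Each endpoint test below follows the same script: wrong HTTP verb (405),
# missing or expired session (401), denied or missing role (403), then the
# success and error payloads for the remaining cases.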
class TestSuite:
async def test_connection(self, client):
client, handler = tuple(client)
logger.info('Test request. Method not allowed')
resp = await client.put('/')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request')
resp = await client.get('/')
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
logger.info('Test is succeeded')
async def test_get_files(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
logger.info('Test request. Method not allowed')
resp = await client.put('/files/list')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.get('/files/list')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.get('/files/list', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.get('/files/list', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.get('/files/list', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed')
resp = await client.get('/files/list', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
data = result.get('data')
        expected_files = {test_file_1, test_file_2, test_file_3, test_file_4,
                          test_file_5, test_file_6, test_file_7}
        exists_files = [file.get('name') for file in data
                        if file.get('name') in expected_files]
        assert len(exists_files) == 7
assert test_file_1 in exists_files
assert test_file_2 in exists_files
assert test_file_3 in exists_files
assert test_file_4 in exists_files
assert test_file_5 in exists_files
assert test_file_6 in exists_files
assert test_file_7 in exists_files
        assert test_file_8 not in exists_files
logger.info('Test is succeeded')
async def test_get_file_info(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_file_part = test_file_4.split('.')[0]
logger.info('Test request. Method not allowed')
resp = await client.put(f'/files?filename={test_file_part}&is_signed=false')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.get(f'/files?filename={test_file_part}&is_signed=false')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. File name is not set')
        resp = await client.get('/files?is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == '"Key not found: \'filename\'"'
logger.info('Test is succeeded')
logger.info('Test request. Is_signed is not set')
resp = await client.get(f'/files?filename={test_file_part}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == '"Key not found: \'is_signed\'"'
logger.info('Test is succeeded')
logger.info('Test request. Is_signed is invalid')
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=test', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Is_signed is invalid'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is low')
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
assert filename == test_file_4
content = result.get('data').get('content')
assert content == test_content
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is high')
test_file_part = test_file_5.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
assert filename == test_file_5
content = result.get('data').get('content')
assert content == test_content
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is medium')
test_file_part = test_file_6.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
assert filename == test_file_6
content = result.get('data').get('content')
assert content == test_content
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is low. Signatures are match')
test_file_part = test_file_4.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=true', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
assert filename == test_file_4
content = result.get('data').get('content')
assert content == test_content
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is high. Signatures are match')
test_file_part = test_file_5.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=true', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
assert filename == test_file_5
content = result.get('data').get('content')
assert content == test_content
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is medium. Signatures are match')
test_file_part = test_file_6.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=true', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
assert filename == test_file_6
content = result.get('data').get('content')
assert content == test_content
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Security level is invalid')
test_file_part = test_file_2.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Security level is invalid'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File name is invalid')
test_file_part = test_file_3.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Invalid format of file name'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File does not exist')
test_file_part = test_file_8.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'File {test_file_8} does not exist'
assert not os.path.exists(f'{test_folder}/{test_file_8}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Signature file does not exist')
test_file_part = test_file_1.split('.')[0]
signature_file = f'{test_file_part}.md5'
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=true', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Signature file {signature_file} does not exist'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Signatures are not match')
test_file_part = test_file_7.split('.')[0]
resp = await client.get(
f'/files?filename={test_file_part}&is_signed=true', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Signatures are not match'
logger.info('Test is succeeded')
async def test_create_file(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
logger.info('Test request. Method not allowed')
resp = await client.put('/files', json={'content': test_content, 'security_level': 'high'})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.post('/files', json={'content': test_content, 'security_level': 'high'})
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'high'},
headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'high'},
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'high'},
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Security level is invalid')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'test'},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Security level is invalid'
logger.info('Test is succeeded')
logger.info(
'Test request. Access allowed. Content is not empty. Security level is not empty. File is not signed')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'high', 'is_signed': False},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Content is empty. Security level is not empty. File is not signed')
resp = await client.post(
'/files', json={'security_level': 'high', 'is_signed': False},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Content is not empty. Security level is empty. File is not signed')
resp = await client.post(
'/files', json={'content': test_content, 'is_signed': False},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Content is not empty. Security level is not empty. File is signed')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'high', 'is_signed': True},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
signature_file = f'{filename.split(".")[0]}.md5'
assert os.path.exists(f'{test_folder}/{filename}')
assert os.path.exists(f'{test_folder}/{signature_file}')
logger.info('Test is succeeded')
logger.info(
'Test request. Access allowed. Content is not empty. Security level is not empty. '
'Is_signed parameter is not set')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'high'},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
filename = result.get('data').get('name')
assert os.path.exists(f'{test_folder}/{filename}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Is_signed parameter is invalid')
resp = await client.post(
'/files', json={'content': test_content, 'security_level': 'test', 'is_signed': 'test'},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Is_signed should be boolean'
logger.info('Test is succeeded')
async def test_delete_file(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_file_part = test_file_2.split('.')[0]
logger.info('Test request. Method not allowed')
resp = await client.put(f'/files/{test_file_part}')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.delete(f'/files/{test_file_part}')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.delete(f'/files/{test_file_part}', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.delete(f'/files/{test_file_part}', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.delete(f'/files/{test_file_part}', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists')
resp = await client.delete(f'/files/{test_file_part}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_2} is successfully deleted'
signature_file = f'{test_file_part}.md5'
assert not os.path.exists(f'{test_folder}/{test_file_2}')
assert not os.path.exists(f'{test_folder}/{signature_file}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File does not exist')
test_file_part = test_file_8.split('.')[0]
resp = await client.delete(f'/files/{test_file_part}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'File {test_file_8} does not exist'
assert not os.path.exists(f'{test_folder}/{test_file_8}')
logger.info('Test is succeeded')
async def test_download_file(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_file_part = test_file_4.split('.')[0]
logger.info('Test request. Method not allowed')
resp = await client.put(f'/files/download?filename={test_file_part}&is_signed=false')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.get(f'/files/download?filename={test_file_part}&is_signed=false')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. File name is not set')
resp = await client.get(
'/files/download?is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == '"Key not found: \'filename\'"'
logger.info('Test is succeeded')
logger.info('Test request. Is_signed is not set')
resp = await client.get(
f'/files/download?filename={test_file_part}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == '"Key not found: \'is_signed\'"'
logger.info('Test is succeeded')
logger.info('Test request. Is_signed is invalid')
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=test',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Is_signed is invalid'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is low')
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_4} is successfully downloaded'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is high')
test_file_part = test_file_5.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_5} is successfully downloaded'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is medium')
test_file_part = test_file_6.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_6} is successfully downloaded'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is low. Signatures are match')
test_file_part = test_file_4.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=true',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_4} is successfully downloaded'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is high. Signatures are match')
test_file_part = test_file_5.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=true',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_5} is successfully downloaded'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File exists. Security level is medium. Signatures are match')
test_file_part = test_file_6.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=true',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'File {test_file_6} is successfully downloaded'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Security level is invalid')
test_file_part = test_file_2.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Security level is invalid'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File name is invalid')
test_file_part = test_file_3.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Invalid format of file name'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. File does not exist')
test_file_part = test_file_8.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'File {test_file_8} does not exist'
assert not os.path.exists(f'{test_folder}/{test_file_8}')
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Signature file does not exist')
test_file_part = test_file_1.split('.')[0]
signature_file = f'{test_file_part}.md5'
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=true',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Signature file {signature_file} does not exist'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Signatures are not match')
test_file_part = test_file_7.split('.')[0]
resp = await client.get(
f'/files/download?filename={test_file_part}&is_signed=true',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Signatures are not match'
logger.info('Test is succeeded')
async def test_download_file_queued(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_file_part = test_file_4.split('.')[0]
logger.info('Test request. Method not allowed')
resp = await client.put(f'/files/download/queued?filename={test_file_part}&is_signed=false')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.get(f'/files/download/queued?filename={test_file_part}&is_signed=false')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.get(
f'/files/download/queued?filename={test_file_part}&is_signed=false', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.get(
f'/files/download/queued?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.get(
f'/files/download/queued?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. File name is not set')
resp = await client.get(
            '/files/download/queued?is_signed=false', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == '"Key not found: \'filename\'"'
logger.info('Test is succeeded')
logger.info('Test request. Is_signed is not set')
resp = await client.get(
f'/files/download/queued?filename={test_file_part}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == '"Key not found: \'is_signed\'"'
logger.info('Test is succeeded')
logger.info('Test request. Is_signed is invalid')
resp = await client.get(
f'/files/download/queued?filename={test_file_part}&is_signed=test',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Is_signed is invalid'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed')
resp = await client.get(
f'/files/download/queued?filename={test_file_part}&is_signed=false',
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'Request for downloading file {test_file_4} is successfully added into queue'
logger.info('Test is succeeded')
async def test_signup(self, client, prepare_data):
client, handler = tuple(client)
test_email = 'user5@test.su'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.put('/signup', json={
'email': test_email,
'password': '5test1234',
'confirm_password': '5test1234',
'name': 'User5',
})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User does not exist')
resp = await client.post('/signup', json={
'email': test_email,
'password': '5test1234',
'confirm_password': '5test1234',
'name': 'User5',
})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'User with email {test_email} is successfully registered'
assert db_session.query(db.User).filter_by(email=test_email).first()
logger.info('Test is succeeded')
logger.info('Test request. Email is not set')
resp = await client.post('/signup', json={
'password': '5test1234',
'confirm_password': '5test1234',
'name': 'User5',
})
assert resp.status == 400
assert await resp.text() == 'Email is not set'
logger.info('Test is succeeded')
logger.info('Test request. Invalid email format')
resp = await client.post('/signup', json={
'email': 'user5',
'password': '5test1234',
'confirm_password': '5test1234',
'name': 'User5',
})
assert resp.status == 400
assert await resp.text() == 'Invalid email format'
logger.info('Test is succeeded')
logger.info('Test request. Password is not set')
resp = await client.post('/signup', json={'email': test_email, 'name': 'User5'})
assert resp.status == 400
assert await resp.text() == 'Password is not set'
logger.info('Test is succeeded')
logger.info('Test request. Invalid password')
resp = await client.post('/signup', json={
'email': test_email,
'password': 'test',
'confirm_password': '5test1234',
'name': 'User5',
})
assert resp.status == 400
assert await resp.text() == \
'Invalid password. Password should contain letters, digits and will be 8 to 50 characters long'
logger.info('Test is succeeded')
logger.info('Test request. Password is not confirmed')
resp = await client.post('/signup', json={
'email': test_email,
'password': '5test1234',
'name': 'User5',
})
assert resp.status == 400
assert await resp.text() == 'Please, repeat the password'
logger.info('Test is succeeded')
logger.info('Test request. Passwords are not match')
resp = await client.post('/signup', json={
'email': test_email,
'password': '5test1234',
'confirm_password': '5test12345',
'name': 'User5',
})
assert resp.status == 400
assert await resp.text() == 'Passwords are not match'
logger.info('Test is succeeded')
logger.info('Test request. Name is not set')
resp = await client.post('/signup', json={
'email': test_email,
'password': '5test1234',
'confirm_password': '5test1234',
})
assert resp.status == 400
assert await resp.text() == 'Name is not set'
logger.info('Test is succeeded')
logger.info('Test request. User exists')
test_email_exists = 'user1@test.su'
resp = await client.post('/signup', json={
'email': test_email_exists,
'password': '1test1234',
'confirm_password': '1test1234',
'name': 'User1',
})
assert resp.status == 400
assert await resp.text() == f'User with email {test_email_exists} already exists'
logger.info('Test is succeeded')
async def test_signin(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_email = 'user1@test.su'
db = DataBase()
db_session = db.create_session()
test_user = db_session.query(db.User).filter_by(email=test_email).first()
logger.info('Test request. Method not allowed')
resp = await client.put('/signin', json={'email': test_email, 'password': '1test1234'})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User exists. Session exists')
resp = await client.post('/signin', json={'email': test_email, 'password': '1test1234'})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == 'You successfully signed in system'
assert result.get('session_id') == session_denied.uuid
assert db_session.query(db.Session).filter_by(user=test_user).first()
assert len(db_session.query(db.Session).filter_by(user=test_user).all()) == 1
logger.info('Test is succeeded')
logger.info('Test request. Email is not set')
resp = await client.post('/signin', json={'password': '1test1234'})
assert resp.status == 400
assert await resp.text() == 'Email is not set'
logger.info('Test is succeeded')
logger.info('Test request. Invalid email format')
resp = await client.post('/signin', json={'email': 'user1', 'password': '1test1234'})
assert resp.status == 400
assert await resp.text() == 'Invalid email format'
logger.info('Test is succeeded')
logger.info('Test request. Password is not set')
resp = await client.post('/signin', json={'email': test_email})
assert resp.status == 400
assert await resp.text() == 'Password is not set'
logger.info('Test is succeeded')
logger.info('Test request. Invalid password')
resp = await client.post('/signin', json={'email': test_email, 'password': 'test'})
assert resp.status == 400
assert await resp.text() == 'Incorrect login or password'
logger.info('Test is succeeded')
logger.info('Test request. User does not exist')
resp = await client.post('/signin', json={'email': 'user6@test.su', 'password': 'test'})
assert resp.status == 400
assert await resp.text() == 'Incorrect login or password'
logger.info('Test is succeeded')
logger.info('Test request. User exists. Session does not exist')
test_email = 'user4@test.su'
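        # note: test_user still refers to user1 (queried above), so these
        # session asserts re-check user1 rather than the new user4 session.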
resp = await client.post('/signin', json={'email': test_email, 'password': '4test1234'})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == 'You successfully signed in system'
assert db_session.query(db.Session).filter_by(user=test_user).first()
assert len(db_session.query(db.Session).filter_by(user=test_user).all()) == 1
logger.info('Test is succeeded')
async def test_logout(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.put('/logout')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.get('/logout')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. User is logged in')
resp = await client.get('/logout', headers={'Authorization': session_denied.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == 'You successfully logged out'
assert not db_session.query(db.Session).filter_by(uuid=session_denied.uuid).first()
logger.info('Test is succeeded')
async def test_add_method(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_method_name = 'test_method_2'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.get(f'/method/{test_method_name}')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.put(f'/method/{test_method_name}')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.put(f'/method/{test_method_name}', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.put(f'/method/{test_method_name}', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.put(f'/method/{test_method_name}', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method does not exist')
resp = await client.put(f'/method/{test_method_name}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'You successfully added method {test_method_name}'
assert db_session.query(db.Method).filter_by(name=test_method_name).first()
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method exists')
test_method_name_exists = 'test_method_1'
resp = await client.put(f'/method/{test_method_name_exists}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name_exists} already exists'
logger.info('Test is succeeded')
async def test_delete_method(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_method_name = 'test_method_1'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.get(f'/method/{test_method_name}')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.delete(f'/method/{test_method_name}')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.delete(f'/method/{test_method_name}', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.delete(f'/method/{test_method_name}', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.delete(f'/method/{test_method_name}', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method exists')
resp = await client.delete(f'/method/{test_method_name}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'You successfully deleted method {test_method_name}'
assert not db_session.query(db.Method).filter_by(name=test_method_name).first()
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method does not exist')
resp = await client.delete(f'/method/{test_method_name}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name} is not found'
logger.info('Test is succeeded')
async def test_add_role(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_role_name = 'test_role_4'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.get(f'/role/{test_role_name}')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.put(f'/role/{test_role_name}')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.put(f'/role/{test_role_name}', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.put(f'/role/{test_role_name}', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.put(f'/role/{test_role_name}', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role does not exist')
resp = await client.put(f'/role/{test_role_name}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'You successfully added role {test_role_name}'
assert db_session.query(db.Role).filter_by(name=test_role_name).first()
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role exists')
test_role_name_exists = 'test_role_1'
resp = await client.put(f'/role/{test_role_name_exists}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Role {test_role_name_exists} already exists'
logger.info('Test is succeeded')
async def test_delete_role(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_role_name = 'test_role_3'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.get(f'/role/{test_role_name}')
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.delete(f'/role/{test_role_name}')
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.delete(f'/role/{test_role_name}', headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.delete(f'/role/{test_role_name}', headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.delete(f'/role/{test_role_name}', headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role exists. Role without user')
resp = await client.delete(f'/role/{test_role_name}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'You successfully deleted role {test_role_name}'
assert not db_session.query(db.Role).filter_by(name=test_role_name).first()
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role exists. Role with user')
        resp = await client.delete('/role/test_role_2', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == "You can't delete role with users"
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role does not exist')
resp = await client.delete(f'/role/{test_role_name}', headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Role {test_role_name} is not found'
logger.info('Test is succeeded')
async def test_add_method_to_role(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_method_name = 'test_method_1'
test_role_name = 'test_role_1'
db = DataBase()
db_session = db.create_session()
test_method = db_session.query(db.Method).filter_by(name=test_method_name).first()
test_role = db_session.query(db.Role).filter_by(name=test_role_name).first()
logger.info('Test request. Method not allowed')
resp = await client.get('/add_method_to_role', json={'method': test_method_name, 'role': test_role_name})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.post('/add_method_to_role', json={'method': test_method_name, 'role': test_role_name})
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method name is not set')
resp = await client.post(
'/add_method_to_role', json={'role': test_role_name}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Method name is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role name is not set')
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Role name is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method does not exist')
test_method_name_not_exists = 'test_method_2'
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name_not_exists, 'role': test_role_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role does not exist')
test_role_name_not_exists = 'test_role_4'
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name, 'role': test_role_name_not_exists},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Role {test_role_name_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method is already added to role')
test_role_name_with_method = 'test_role_2'
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name, 'role': test_role_name_with_method},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name} already exists in role {test_role_name_with_method}'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method is not added to role')
resp = await client.post(
'/add_method_to_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'You successfully added method {test_method_name} to role {test_role_name}'
assert db_session.query(db.MethodRole).filter_by(method_id=test_method.id, role_id=test_role.id).first()
logger.info('Test is succeeded')
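# Walks the same authorization ladder as the add test, then checks the happy path removes the MethodRole link.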
async def test_delete_method_from_role(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_method_name = 'test_method_1'
test_role_name = 'test_role_2'
db = DataBase()
db_session = db.create_session()
test_method = db_session.query(db.Method).filter_by(name=test_method_name).first()
test_role = db_session.query(db.Role).filter_by(name=test_role_name).first()
logger.info('Test request. Method not allowed')
resp = await client.get('/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.post('/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name})
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method name is not set')
resp = await client.post(
'/delete_method_from_role', json={'role': test_role_name}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Method name is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role name is not set')
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Role name is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method does not exist')
test_method_name_not_exists = 'test_method_2'
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name_not_exists, 'role': test_role_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role does not exist')
test_role_name_not_exists = 'test_role_4'
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name_not_exists},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Role {test_role_name_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method is not found in role')
test_role_name_without_method = 'test_role_1'
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name_without_method},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name} is not found in role {test_role_name_without_method}'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method is added to role')
resp = await client.post(
'/delete_method_from_role', json={'method': test_method_name, 'role': test_role_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == f'You successfully deleted method {test_method_name} from role {test_role_name}'
assert not db_session.query(db.MethodRole).filter_by(method_id=test_method.id, role_id=test_role.id).first()
logger.info('Test is succeeded')
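# Validates /change_shared_prop: auth ladder, missing or non-boolean payload values, unknown method, then a successful toggle of the shared flag.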
async def test_change_shared_prop(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_method_name = 'test_method_1'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.get('/change_shared_prop', json={'method': test_method_name, 'value': True})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.post('/change_shared_prop', json={'method': test_method_name, 'value': True})
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name, 'value': True}, headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name, 'value': True},
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name, 'value': True},
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method name is not set')
resp = await client.post(
'/change_shared_prop', json={'value': True}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Method name is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Value is not set')
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Value is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Value is not boolean')
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name, 'value': 'test'},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Value should be boolean'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method does not exist')
test_method_name_not_exists = 'test_method_2'
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name_not_exists, 'value': True},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Method {test_method_name_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Method exists')
resp = await client.post(
'/change_shared_prop', json={'method': test_method_name, 'value': True},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == \
f'You successfully changed shared property of method {test_method_name}. Property is enabled'
test_method = db_session.query(db.Method).filter_by(name=test_method_name).first()
assert test_method.shared
logger.info('Test is succeeded')
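# Validates /change_user_role: auth ladder, missing email/role, unknown user or role, then a successful role change persisted in the DB.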
async def test_change_user_role(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
test_email = 'user1@test.su'
test_role_name = 'test_role_2'
db = DataBase()
db_session = db.create_session()
logger.info('Test request. Method not allowed')
resp = await client.get('/change_user_role', json={'email': test_email, 'role': test_role_name})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.post('/change_user_role', json={'email': test_email, 'role': test_role_name})
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.post(
'/change_user_role', json={'email': test_email, 'role': test_role_name}, headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.post(
'/change_user_role', json={'email': test_email, 'role': test_role_name},
headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.post(
'/change_user_role', json={'email': test_email, 'role': test_role_name},
headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Email is not set')
resp = await client.post(
'/change_user_role', json={'role': test_role_name}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Email is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role name is not set')
resp = await client.post(
'/change_user_role', json={'email': test_email}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Role name is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. User does not exist')
test_email_not_exists = 'user6@test.su'
resp = await client.post(
'/change_user_role', json={'email': test_email_not_exists, 'role': test_role_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'User with email {test_email_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Role does not exist')
test_role_name_not_exists = 'test_role_4'
resp = await client.post(
'/change_user_role', json={'email': test_email, 'role': test_role_name_not_exists},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == f'Role {test_role_name_not_exists} is not found'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. User and role exist')
resp = await client.post(
'/change_user_role', json={'email': test_email, 'role': test_role_name},
headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == \
f'You successfully changed role of user with email {test_email}. New role is {test_role_name}'
test_user = db_session.query(db.User).filter_by(email=test_email).first()
test_role = db_session.query(db.Role).filter_by(name=test_role_name).first()
assert test_user.role_id == test_role.id
logger.info('Test is succeeded')
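# Validates /change_file_dir: auth ladder, missing path, then a successful change reflected in both file services.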
async def test_change_file_dir(self, client, prepare_data):
client, handler = tuple(client)
session_denied, session_allowed, session_no_role = tuple(prepare_data)
new_test_folder = '../test_folder_2'
logger.info('Test request. Method not allowed')
resp = await client.get('/change_file_dir', json={'path': new_test_folder})
assert resp.status == 405
logger.info('Test is succeeded')
logger.info('Test request. User is not logged in')
resp = await client.post('/change_file_dir', json={'path': new_test_folder})
assert resp.status == 401
assert await resp.text() == 'Unauthorized request'
logger.info('Test is succeeded')
logger.info('Test request. Session expired')
resp = await client.post('/change_file_dir', json={'path': new_test_folder}, headers={'Authorization': 'test'})
assert resp.status == 401
assert await resp.text() == 'Session expired. Please, sign in again'
logger.info('Test is succeeded')
logger.info('Test request. Access denied')
resp = await client.post(
'/change_file_dir', json={'path': new_test_folder}, headers={'Authorization': session_denied.uuid})
assert resp.status == 403
assert await resp.text() == 'Access denied'
logger.info('Test is succeeded')
logger.info('Test request. User without role')
resp = await client.post(
'/change_file_dir', json={'path': new_test_folder}, headers={'Authorization': session_no_role.uuid})
assert resp.status == 403
assert await resp.text() == 'User is not attached to role'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Directory path is not set')
resp = await client.post('/change_file_dir', json={}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 400
assert await resp.text() == 'Directory path is not set'
logger.info('Test is succeeded')
logger.info('Test request. Access allowed. Directory path is set')
resp = await client.post(
'/change_file_dir', json={'path': new_test_folder}, headers={'Authorization': session_allowed.uuid})
assert resp.status == 200
result = json.loads(await resp.text())
assert result.get('status') == 'success'
assert result.get('message') == \
f'You successfully changed working directory path. New path is {new_test_folder}'
assert handler.file_service.path == new_test_folder
assert handler.file_service_signed.path == new_test_folder
logger.info('Test is succeeded')
| 47.726279
| 120
| 0.64673
| 10,660
| 82,996
| 4.857317
| 0.024296
| 0.067209
| 0.093551
| 0.070164
| 0.929392
| 0.913672
| 0.899168
| 0.881458
| 0.8741
| 0.860214
| 0
| 0.014467
| 0.226312
| 82,996
| 1,738
| 121
| 47.75374
| 0.791896
| 0.000627
| 0
| 0.73506
| 0
| 0.000664
| 0.306917
| 0.057909
| 0
| 0
| 0
| 0
| 0.267596
| 1
| 0.00332
| false
| 0.024568
| 0.005976
| 0
| 0.010624
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9bb892ec8000e6a3112b058687d40b42dc6a2a1d
| 160
|
py
|
Python
|
metsuke/__init__.py
|
shunjuu/Metsuke
|
57a52276f463adbcce40d3ff48eb539a81ce85eb
|
[
"MIT"
] | null | null | null |
metsuke/__init__.py
|
shunjuu/Metsuke
|
57a52276f463adbcce40d3ff48eb539a81ce85eb
|
[
"MIT"
] | null | null | null |
metsuke/__init__.py
|
shunjuu/Metsuke
|
57a52276f463adbcce40d3ff48eb539a81ce85eb
|
[
"MIT"
] | null | null | null |
from .metsuke import validate, validate_feeditem, validate_job
from .metsuke import generate, generate_feeditem, generate_job
from .metsuke import FeedItem, Job
| 53.333333
| 62
| 0.85
| 21
| 160
| 6.285714
| 0.333333
| 0.25
| 0.386364
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 160
| 3
| 63
| 53.333333
| 0.916667
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
fd5e142d80e758d56e69b9ff9d065b274d3be72a
| 20,523
|
py
|
Python
|
src/jk_sysinfo/get_lshw.py
|
jkpubsrc/python-module-jk-sysinfo
|
583c9e5d10f64a722ffa794d081aaf94354ba4fb
|
[
"Apache-1.1"
] | null | null | null |
src/jk_sysinfo/get_lshw.py
|
jkpubsrc/python-module-jk-sysinfo
|
583c9e5d10f64a722ffa794d081aaf94354ba4fb
|
[
"Apache-1.1"
] | null | null | null |
src/jk_sysinfo/get_lshw.py
|
jkpubsrc/python-module-jk-sysinfo
|
583c9e5d10f64a722ffa794d081aaf94354ba4fb
|
[
"Apache-1.1"
] | null | null | null |
import json
import re
from jk_cachefunccalls import cacheCalls
from .parsing_utils import *
from .invoke_utils import run
def _isObj(data:dict, filter:dict) -> bool:
    assert isinstance(data, dict)
    assert isinstance(filter, dict)
    for k, v in filter.items():
        if k in data:
            # 1st attempt
            v2 = data[k]
            if v != v2:
                return False
        else:
            if k.startswith("_"):
                # 2nd attempt: strip a leading underscore, so callers can pass
                # Python reserved words as keyword arguments (e.g. _class for "class")
                k = k[1:]
                if k in data:
                    v2 = data[k]
                    if v != v2:
                        return False
                else:
                    return False
            else:
                return False
    return True
#
def _findAllR(d:dict, **kwargs):
    # recursively yield every nested dict that matches all key/value pairs in kwargs
    assert isinstance(d, dict)
    for key, data in d.items():
        if isinstance(data, (list, tuple)):
            for e in data:
                if isinstance(e, dict):
                    if _isObj(e, kwargs):
                        yield e
            for e in data:
                if isinstance(e, dict):
                    yield from _findAllR(e, **kwargs)
        elif isinstance(data, dict):
            if _isObj(data, kwargs):
                yield data
            yield from _findAllR(data, **kwargs)
#
#
# Returns:
# {
# "capabilities": {
# "vsyscall32": "32-bit processes"
# },
# "children": [
# {
# "children": [
# {
# "claimed": true,
# "class": "memory",
# "description": "System memory",
# "id": "memory",
# "physid": "0",
# "size": 32635547648,
# "units": "bytes"
# },
# {
# "businfo": "cpu@0",
# "capabilities": {
# "3dnowprefetch": true,
# "abm": true,
# "acpi": "thermal control (ACPI)",
# "adx": true,
# "aes": true,
# ...
# },
# "capacity": 3900000000,
# "claimed": true,
# "class": "processor",
# "id": "cpu",
# "physid": "1",
# "product": "Intel(R) Core(TM) i5-6600 CPU @ 3.30GHz",
# "size": 799992000,
# "units": "Hz",
# "vendor": "Intel Corp.",
# "width": 64
# },
# {
# "businfo": "pci@0000:00:00.0",
# "children": [
# {
# "businfo": "pci@0000:00:02.0",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "rom": "extension ROM",
# "vga_controller": true
# },
# "claimed": true,
# "class": "display",
# "clock": 33000000,
# "configuration": {
# "driver": "i915_bpo",
# "latency": "0"
# },
# "description": "VGA compatible controller",
# "handle": "PCI:0000:00:02.0",
# "id": "display",
# "physid": "2",
# "product": "HD Graphics 530",
# "vendor": "Intel Corporation",
# "version": "06",
# "width": 64
# },
# {
# "businfo": "pci@0000:00:08.0",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing"
# },
# "class": "generic",
# "clock": 33000000,
# "configuration": {
# "latency": "0"
# },
# "description": "System peripheral",
# "handle": "PCI:0000:00:08.0",
# "id": "generic:0",
# "physid": "8",
# "product": "Xeon E3-1200 v5/v6 / E3-1500 v5 / 6th/7th Gen Core Processor Gaussian Mixture Model",
# "vendor": "Intel Corporation",
# "version": "00",
# "width": 64
# },
# {
# "businfo": "pci@0000:00:14.0",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "xhci": true
# },
# "claimed": true,
# "class": "bus",
# "clock": 33000000,
# "configuration": {
# "driver": "xhci_hcd",
# "latency": "0"
# },
# "description": "USB controller",
# "handle": "PCI:0000:00:14.0",
# "id": "usb",
# "physid": "14",
# "product": "100 Series/C230 Series Chipset Family USB 3.0 xHCI Controller",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 64
# },
# ...,
# {
# "businfo": "pci@0000:00:17.0",
# "capabilities": {
# "ahci_1.0": true,
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "storage": true
# },
# "claimed": true,
# "class": "storage",
# "clock": 66000000,
# "configuration": {
# "driver": "ahci",
# "latency": "0"
# },
# "description": "SATA controller",
# "handle": "PCI:0000:00:17.0",
# "id": "storage",
# "physid": "17",
# "product": "Q170/Q150/B150/H170/H110/Z170/CM236 Chipset SATA Controller [AHCI Mode]",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 32
# },
# ...,
# {
# "businfo": "pci@0000:00:1f.2",
# "capabilities": {
# "bus_master": "bus mastering"
# },
# "class": "memory",
# "clock": 33000000,
# "configuration": {
# "latency": "0"
# },
# "description": "Memory controller",
# "handle": "PCI:0000:00:1f.2",
# "id": "memory",
# "physid": "1f.2",
# "product": "100 Series/C230 Series Chipset Family Power Management Controller",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 32
# },
# {
# "businfo": "pci@0000:00:1f.3",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing"
# },
# "claimed": true,
# "class": "multimedia",
# "clock": 33000000,
# "configuration": {
# "driver": "snd_hda_intel",
# "latency": "32"
# },
# "description": "Audio device",
# "handle": "PCI:0000:00:1f.3",
# "id": "multimedia",
# "physid": "1f.3",
# "product": "100 Series/C230 Series Chipset Family HD Audio Controller",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 64
# },
# ...,
# {
# "businfo": "pci@0000:00:1f.6",
# "capabilities": {
# "1000bt-fd": "1Gbit/s (full duplex)",
# "100bt": "100Mbit/s",
# "100bt-fd": "100Mbit/s (full duplex)",
# "10bt": "10Mbit/s",
# "10bt-fd": "10Mbit/s (full duplex)",
# "autonegotiation": "Auto-negotiation",
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "ethernet": true,
# "physical": "Physical interface",
# "tp": "twisted pair"
# },
# "capacity": 1000000000,
# "claimed": true,
# "class": "network",
# "clock": 33000000,
# "configuration": {
# "autonegotiation": "on",
# "broadcast": "yes",
# "driver": "e1000e",
# "driverversion": "3.2.6-k",
# "duplex": "full",
# "firmware": "0.7-4",
# "latency": "0",
# "link": "yes",
# "multicast": "yes",
# "port": "twisted pair",
# "speed": "1Gbit/s"
# },
# "description": "Ethernet interface",
# "handle": "PCI:0000:00:1f.6",
# "id": "network",
# "logicalname": "enp0s31f6",
# "physid": "1f.6",
# "product": "Ethernet Connection (2) I219-V",
# "serial": "d8:cb:8a:ec:5f:05",
# "size": 1000000000,
# "units": "bit/s",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 32
# }
# ],
# "claimed": true,
# "class": "bridge",
# "clock": 33000000,
# "description": "Host bridge",
# "handle": "PCIBUS:0000:00",
# "id": "pci",
# "physid": "100",
# "product": "Xeon E3-1200 v5/E3-1500 v5/6th Gen Core Processor Host Bridge/DRAM Registers",
# "vendor": "Intel Corporation",
# "version": "07",
# "width": 32
# },
# {
# "capabilities": {
# "emulated": "Emulated device"
# },
# "children": [
# {
# "businfo": "scsi@3:0.0.0",
# "capabilities": {
# "audio": "Audio CD playback",
# "cd-r": "CD-R burning",
# "cd-rw": "CD-RW burning",
# "dvd": "DVD playback",
# "dvd-r": "DVD-R burning",
# "dvd-ram": "DVD-RAM burning",
# "removable": "support is removable"
# },
# "children": [
# {
# "claimed": true,
# "class": "disk",
# "configuration": {
# "mount.fstype": "iso9660",
# "mount.options": "ro,nosuid,nodev,relatime,uid=1000,gid=1000,iocharset=utf8,mode=0400,dmode=0500",
# "state": "mounted"
# },
# "dev": "11:0",
# "id": "medium",
# "logicalname": [
# "/dev/cdrom",
# "/media/xxxxxxxx/YYYYYY"
# ],
# "physid": "0"
# }
# ],
# "claimed": true,
# "class": "disk",
# "configuration": {
# "ansiversion": "5",
# "mount.fstype": "iso9660",
# "mount.options": "ro,nosuid,nodev,relatime,uid=1000,gid=1000,iocharset=utf8,mode=0400,dmode=0500",
# "state": "mounted",
# "status": "ready"
# },
# "description": "DVD-RAM writer",
# "dev": "11:0",
# "handle": "SCSI:03:00:00:00",
# "id": "cdrom",
# "logicalname": [
# "/dev/cdrom",
# "/dev/cdrw",
# "/dev/dvd",
# "/dev/dvdrw",
# "/dev/sr0",
# ],
# "physid": "0.0.0",
# "product": "CDDVDW SH-S203P",
# "vendor": "TSSTcorp",
# "version": "SB00"
# }
# ],
# "claimed": true,
# "class": "storage",
# "id": "scsi",
# "logicalname": "scsi3",
# "physid": "2"
# }
# ],
# "claimed": true,
# "class": "bus",
# "description": "Motherboard",
# "id": "core",
# "physid": "0"
# }
# ],
# "claimed": true,
# "class": "system",
# "description": "Computer",
# "id": "nbxxxxxxxx",
# "width": 64
# }
#
def parse_lshw(stdout:str, stderr:str, exitcode:int) -> dict:
    try:
        data_lshw = json.loads(stdout)
    except json.decoder.JSONDecodeError as ee:
        raise Exception("JSON parsing error. Please upgrade lshw as your OS seems to use a very old version of lshw.") from ee
    if isinstance(data_lshw, list):
        assert len(data_lshw) == 1
        data_lshw = data_lshw[0]
    assert isinstance(data_lshw, dict)
    # enrich with additional information: network
    for network in _findAllR(data_lshw, id="network"):
        if ("capabilities" in network) and network["capabilities"].get("tp"):
            # regular twisted pair network
            maxSpeedInBitsPerSecond = None
            for key in network["capabilities"].keys():
                # capability keys such as "10bt", "100bt-fd" or "1000bt-fd" encode the link speed in MBit/s
                m = re.match(r"^(\d+)bt(-fd)?$", key)
                if m:
                    x = int(m.groups()[0]) * 1000000
                    if (maxSpeedInBitsPerSecond is None) or (x > maxSpeedInBitsPerSecond):
                        maxSpeedInBitsPerSecond = x
            if maxSpeedInBitsPerSecond is None:
                if network.get("size"):
                    maxSpeedInBitsPerSecond = int(network["size"])
            if maxSpeedInBitsPerSecond:
                network["maxSpeedInBitsPerSecond"] = maxSpeedInBitsPerSecond
    # enrich with additional information: cpu
    for cpu in _findAllR(data_lshw, id="cpu"):
        if "capabilities" in cpu:
            cpu["hyperthreading"] = "ht" in cpu["capabilities"]
            cpu["virtualization"] = "vmx" in cpu["capabilities"]
            cpu["bitArch"] = 64 if "x86-64" in cpu["capabilities"] else 32
            cpu["encryption"] = "aes" in cpu["capabilities"]
    # return data
    return data_lshw
#
#
# Returns:
# {
# "capabilities": {
# "vsyscall32": "32-bit processes"
# },
# "children": [
# {
# "children": [
# {
# "claimed": true,
# "class": "memory",
# "description": "System memory",
# "id": "memory",
# "physid": "0",
# "size": 32635547648,
# "units": "bytes"
# },
# {
# "businfo": "cpu@0",
# "capabilities": {
# "3dnowprefetch": true,
# "abm": true,
# "acpi": "thermal control (ACPI)",
# "adx": true,
# "aes": true,
# ...
# },
# "capacity": 3900000000,
# "claimed": true,
# "class": "processor",
# "id": "cpu",
# "physid": "1",
# "product": "Intel(R) Core(TM) i5-6600 CPU @ 3.30GHz",
# "size": 799992000,
# "units": "Hz",
# "vendor": "Intel Corp.",
# "width": 64
# },
# {
# "businfo": "pci@0000:00:00.0",
# "children": [
# {
# "businfo": "pci@0000:00:02.0",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "rom": "extension ROM",
# "vga_controller": true
# },
# "claimed": true,
# "class": "display",
# "clock": 33000000,
# "configuration": {
# "driver": "i915_bpo",
# "latency": "0"
# },
# "description": "VGA compatible controller",
# "handle": "PCI:0000:00:02.0",
# "id": "display",
# "physid": "2",
# "product": "HD Graphics 530",
# "vendor": "Intel Corporation",
# "version": "06",
# "width": 64
# },
# {
# "businfo": "pci@0000:00:08.0",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing"
# },
# "class": "generic",
# "clock": 33000000,
# "configuration": {
# "latency": "0"
# },
# "description": "System peripheral",
# "handle": "PCI:0000:00:08.0",
# "id": "generic:0",
# "physid": "8",
# "product": "Xeon E3-1200 v5/v6 / E3-1500 v5 / 6th/7th Gen Core Processor Gaussian Mixture Model",
# "vendor": "Intel Corporation",
# "version": "00",
# "width": 64
# },
# {
# "businfo": "pci@0000:00:14.0",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "xhci": true
# },
# "claimed": true,
# "class": "bus",
# "clock": 33000000,
# "configuration": {
# "driver": "xhci_hcd",
# "latency": "0"
# },
# "description": "USB controller",
# "handle": "PCI:0000:00:14.0",
# "id": "usb",
# "physid": "14",
# "product": "100 Series/C230 Series Chipset Family USB 3.0 xHCI Controller",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 64
# },
# ...,
# {
# "businfo": "pci@0000:00:17.0",
# "capabilities": {
# "ahci_1.0": true,
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "storage": true
# },
# "claimed": true,
# "class": "storage",
# "clock": 66000000,
# "configuration": {
# "driver": "ahci",
# "latency": "0"
# },
# "description": "SATA controller",
# "handle": "PCI:0000:00:17.0",
# "id": "storage",
# "physid": "17",
# "product": "Q170/Q150/B150/H170/H110/Z170/CM236 Chipset SATA Controller [AHCI Mode]",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 32
# },
# ...,
# {
# "businfo": "pci@0000:00:1f.2",
# "capabilities": {
# "bus_master": "bus mastering"
# },
# "class": "memory",
# "clock": 33000000,
# "configuration": {
# "latency": "0"
# },
# "description": "Memory controller",
# "handle": "PCI:0000:00:1f.2",
# "id": "memory",
# "physid": "1f.2",
# "product": "100 Series/C230 Series Chipset Family Power Management Controller",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 32
# },
# {
# "businfo": "pci@0000:00:1f.3",
# "capabilities": {
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing"
# },
# "claimed": true,
# "class": "multimedia",
# "clock": 33000000,
# "configuration": {
# "driver": "snd_hda_intel",
# "latency": "32"
# },
# "description": "Audio device",
# "handle": "PCI:0000:00:1f.3",
# "id": "multimedia",
# "physid": "1f.3",
# "product": "100 Series/C230 Series Chipset Family HD Audio Controller",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 64
# },
# ...,
# {
# "businfo": "pci@0000:00:1f.6",
# "capabilities": {
# "1000bt-fd": "1Gbit/s (full duplex)",
# "100bt": "100Mbit/s",
# "100bt-fd": "100Mbit/s (full duplex)",
# "10bt": "10Mbit/s",
# "10bt-fd": "10Mbit/s (full duplex)",
# "autonegotiation": "Auto-negotiation",
# "bus_master": "bus mastering",
# "cap_list": "PCI capabilities listing",
# "ethernet": true,
# "physical": "Physical interface",
# "tp": "twisted pair"
# },
# "capacity": 1000000000,
# "claimed": true,
# "class": "network",
# "clock": 33000000,
# "configuration": {
# "autonegotiation": "on",
# "broadcast": "yes",
# "driver": "e1000e",
# "driverversion": "3.2.6-k",
# "duplex": "full",
# "firmware": "0.7-4",
# "latency": "0",
# "link": "yes",
# "multicast": "yes",
# "port": "twisted pair",
# "speed": "1Gbit/s"
# },
# "description": "Ethernet interface",
# "handle": "PCI:0000:00:1f.6",
# "id": "network",
# "logicalname": "enp0s31f6",
# "physid": "1f.6",
# "product": "Ethernet Connection (2) I219-V",
# "serial": "d8:cb:8a:ec:5f:05",
# "size": 1000000000,
# "units": "bit/s",
# "vendor": "Intel Corporation",
# "version": "31",
# "width": 32
# }
# ],
# "claimed": true,
# "class": "bridge",
# "clock": 33000000,
# "description": "Host bridge",
# "handle": "PCIBUS:0000:00",
# "id": "pci",
# "physid": "100",
# "product": "Xeon E3-1200 v5/E3-1500 v5/6th Gen Core Processor Host Bridge/DRAM Registers",
# "vendor": "Intel Corporation",
# "version": "07",
# "width": 32
# },
# {
# "capabilities": {
# "emulated": "Emulated device"
# },
# "children": [
# {
# "businfo": "scsi@3:0.0.0",
# "capabilities": {
# "audio": "Audio CD playback",
# "cd-r": "CD-R burning",
# "cd-rw": "CD-RW burning",
# "dvd": "DVD playback",
# "dvd-r": "DVD-R burning",
# "dvd-ram": "DVD-RAM burning",
# "removable": "support is removable"
# },
# "children": [
# {
# "claimed": true,
# "class": "disk",
# "configuration": {
# "mount.fstype": "iso9660",
# "mount.options": "ro,nosuid,nodev,relatime,uid=1000,gid=1000,iocharset=utf8,mode=0400,dmode=0500",
# "state": "mounted"
# },
# "dev": "11:0",
# "id": "medium",
# "logicalname": [
# "/dev/cdrom",
# "/media/xxxxxxxx/YYYYYY"
# ],
# "physid": "0"
# }
# ],
# "claimed": true,
# "class": "disk",
# "configuration": {
# "ansiversion": "5",
# "mount.fstype": "iso9660",
# "mount.options": "ro,nosuid,nodev,relatime,uid=1000,gid=1000,iocharset=utf8,mode=0400,dmode=0500",
# "state": "mounted",
# "status": "ready"
# },
# "description": "DVD-RAM writer",
# "dev": "11:0",
# "handle": "SCSI:03:00:00:00",
# "id": "cdrom",
# "logicalname": [
# "/dev/cdrom",
# "/dev/cdrw",
# "/dev/dvd",
# "/dev/dvdrw",
# "/dev/sr0",
# "/media/xxxxxxxx/YYYYYY"
# ],
# "physid": "0.0.0",
# "product": "CDDVDW SH-S203P",
# "vendor": "TSSTcorp",
# "version": "SB00"
# }
# ],
# "claimed": true,
# "class": "storage",
# "id": "scsi",
# "logicalname": "scsi3",
# "physid": "2"
# }
# ],
# "claimed": true,
# "class": "bus",
# "description": "Motherboard",
# "id": "core",
# "physid": "0"
# }
# ],
# "claimed": true,
# "class": "system",
# "description": "Computer",
# "id": "nbxxxxxxxx",
# "width": 64
# }
#
@cacheCalls(seconds=3, dependArgs=[0])
def get_lshw(c = None) -> dict:
    stdout, stderr, exitcode = run(c, "/usr/bin/lshw -json")
    return parse_lshw(stdout, stderr, exitcode)
#
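# Minimal usage sketch (an assumption, not part of this module's documented API;
# it presumes run() executes the command locally when no connection `c` is given):
#
#   data = get_lshw()
#   for nic in _findAllR(data, id="network"):
#       print(nic.get("logicalname"), nic.get("maxSpeedInBitsPerSecond"))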
| 27.364
| 112
| 0.472202
| 1,928
| 20,523
| 4.994295
| 0.171681
| 0.01994
| 0.02804
| 0.026586
| 0.836743
| 0.827916
| 0.827916
| 0.827916
| 0.822308
| 0.8167
| 0
| 0.077532
| 0.32315
| 20,523
| 749
| 113
| 27.400534
| 0.61565
| 0.817278
| 0
| 0.22973
| 0
| 0
| 0.104617
| 0.007427
| 0
| 0
| 0
| 0
| 0.067568
| 1
| 0.054054
| false
| 0
| 0.067568
| 0
| 0.216216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5dd8bccba6d2fe79915300c4d361687a287e1b9
| 3,051
|
py
|
Python
|
distance_distribution.py
|
explorer2326/Stance-Detection-Fake-News-Challenge
|
3eb473aa5d7973cb9901abc1f91f50d58a235200
|
[
"Apache-2.0"
] | null | null | null |
distance_distribution.py
|
explorer2326/Stance-Detection-Fake-News-Challenge
|
3eb473aa5d7973cb9901abc1f91f50d58a235200
|
[
"Apache-2.0"
] | null | null | null |
distance_distribution.py
|
explorer2326/Stance-Detection-Fake-News-Challenge
|
3eb473aa5d7973cb9901abc1f91f50d58a235200
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 8 08:42:58 2018
@author: Adam
"""
import numpy as np
import matplotlib.pyplot as plt
# the .npy files hold pickled dicts (accessed via .item() below), so NumPy >= 1.16.3 needs allow_pickle=True
stance_index = np.load('data/stance_index.npy', allow_pickle=True)
word2vec_cosine_similarity = np.load('data/word2vec_cosine_similarity.npy', allow_pickle=True)
tf_idf_similarity = np.load('data/tf_idf_similarity.npy', allow_pickle=True)
lm_kl_divergence = np.load('data/lm_kl_divergence.npy', allow_pickle=True)
#%%
def stance_plot(id):
    # word2vec cosine similarity vs. tf-idf similarity, colour-coded by stance
    stance = stance_index.item().get(id)
    color = {'agree': 'r', 'disagree': 'b', 'discuss': 'g'}.get(stance, 'y')
    plt.scatter(word2vec_cosine_similarity.item().get(id), tf_idf_similarity.item().get(id), c=color, marker='+', s=5)
#for index in range(len(stance_index.item())):
for index in range(1000):
    stance_plot(index)
plt.savefig('overall.jpeg', format='jpeg', dpi=1200)
#%%
def stance_plot2(id):
    # word2vec cosine similarity vs. language-model KL divergence, colour-coded by stance
    stance = stance_index.item().get(id)
    color = {'agree': 'r', 'disagree': 'b', 'discuss': 'g'}.get(stance, 'y')
    plt.scatter(word2vec_cosine_similarity.item().get(id), lm_kl_divergence.item().get(id), c=color, s=5, marker='+')
#for index in range(len(stance_index.item())):
for index in range(1000):
    stance_plot2(index)
plt.savefig('overall2.jpeg', format='jpeg', dpi=1200)
#%%
def stance_plot3(id):
    # tf-idf similarity vs. language-model KL divergence, colour-coded by stance
    stance = stance_index.item().get(id)
    color = {'agree': 'r', 'disagree': 'b', 'discuss': 'g'}.get(stance, 'y')
    plt.scatter(tf_idf_similarity.item().get(id), lm_kl_divergence.item().get(id), c=color, s=5, marker='+')
#for index in range(len(stance_index.item())):
for index in range(1000):
    stance_plot3(index)
plt.savefig('overall3.jpeg', format='jpeg', dpi=1200)
| 38.620253
| 138
| 0.644707
| 430
| 3,051
| 4.402326
| 0.15814
| 0.099842
| 0.128368
| 0.160592
| 0.800317
| 0.778658
| 0.778658
| 0.746962
| 0.746962
| 0.746962
| 0
| 0.025227
| 0.168469
| 3,051
| 79
| 139
| 38.620253
| 0.72093
| 0.071124
| 0
| 0.4
| 0
| 0
| 0.08893
| 0.039483
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.044444
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5f07ad032c1513b8d2825ff8f54be984dda00eb
| 31,613
|
py
|
Python
|
annoworkapi/generated_api.py
|
kurusugawa-computer/annowork-api-python-client-draft
|
40ee4481f763bbff15f28a93f7e028f25a744dab
|
[
"MIT"
] | null | null | null |
annoworkapi/generated_api.py
|
kurusugawa-computer/annowork-api-python-client-draft
|
40ee4481f763bbff15f28a93f7e028f25a744dab
|
[
"MIT"
] | null | null | null |
annoworkapi/generated_api.py
|
kurusugawa-computer/annowork-api-python-client-draft
|
40ee4481f763bbff15f28a93f7e028f25a744dab
|
[
"MIT"
] | null | null | null |
# flake8: noqa: W291
# pylint: disable=too-many-lines,trailing-whitespace
"""
Header portion of AbstractAnnoworkApi
Note:
This file is auto-generated by openapi-generator. See generate/README.md for details.
"""
from __future__ import annotations
import abc
import warnings # pylint: disable=unused-import
from typing import Any, Optional, Union # pylint: disable=unused-import
import annoworkapi # pylint: disable=unused-import
class AbstractAnnoworkApi(abc.ABC):
"""
Abstract class of the AnnoworkApi class
"""
@abc.abstractmethod
def _request_wrapper(
self,
http_method: str,
url_path: str,
*,
query_params: Optional[dict[str, Any]] = None,
header_params: Optional[dict[str, Any]] = None,
request_body: Optional[Any] = None,
log_response_with_error: bool = True,
) -> Any:
pass
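# A concrete client must supply _request_wrapper(). A minimal sketch, assuming the
# `requests` library and a hypothetical `self._endpoint` base URL (neither is part
# of this generated file):
#
#   def _request_wrapper(self, http_method, url_path, *, query_params=None,
#                        header_params=None, request_body=None,
#                        log_response_with_error=True):
#       import requests
#       response = requests.request(http_method, self._endpoint + url_path,
#                                   params=query_params, headers=header_params,
#                                   json=request_body)
#       response.raise_for_status()
#       return response.json()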
#########################################
# Public Method : AccountApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def confirm_reset_password(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""パスワードリセットstep2(新しいパスワードに変更)
新しいパスワードに変更します。 本人確認のため、[パスワードリセットを要求](#operation/resetPassword)で受信したメールに記載された検証コードを使用します。 パスワードリセットプロセスの最終ステップです。
Args:
request_body (Any): Request Body
confirm_reset_password_request (ConfirmResetPasswordRequest): (required)
Returns:
InlineResponse200
"""
url_path = f"/confirm-reset-password"
http_method = "POST"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def confirm_sign_up(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""サインアップstep2(本登録)
アカウントのサインアップの最後のステップとして、アカウントを本登録します。
Args:
request_body (Any): Request Body
confirm_sign_up_request (ConfirmSignUpRequest): (required)
Returns:
InlineResponse200
"""
url_path = f"/confirm-sign-up"
http_method = "POST"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_account_external_linkage_info(self, user_id: str, **kwargs) -> Any:
"""アカウント外部連携情報取得
Args:
user_id (str): ユーザーID (required)
Returns:
InlineResponse2001
"""
url_path = f"/accounts/{user_id}/external-linkage-info"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_account_external_linkage_info(self, user_id: str, request_body: Optional[Any] = None, **kwargs) -> Any:
"""アカウント外部連携情報更新
Args:
user_id (str): ユーザーID (required)
request_body (Any): Request Body
put_account_external_linkage_info_request (PutAccountExternalLinkageInfoRequest): (required)
Returns:
InlineResponse2001
"""
url_path = f"/accounts/{user_id}/external-linkage-info"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def reset_password(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""パスワードリセットstep1(開始)
パスワードリセットに必要な確認コードをメールで送付します。 後続の[新しいパスワードに変更](#operation/confirmResetPassword)を実行することで、新しいパスワードに変更できます。
Args:
request_body (Any): Request Body
reset_password_request (ResetPasswordRequest): (required)
Returns:
InlineResponse200
"""
url_path = f"/reset-password"
http_method = "POST"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def sign_up(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""サインアップstep1(仮登録)
アカウントのサインアップの最初のステップとして、アカウントを仮登録します。 AnnoWorkに未登録のメールアドレスであれば、新規アカウントが仮登録状態で作成され、本登録フローのためのメールが送信されます。 このメールには仮パスワードなどが記載されています。 指定したメールアドレスを使うユーザーが仮登録であれば、本登録フローのメールが再送信されます。 指定したメールアドレスを使うユーザーが本登録であれば、不正なリクエストとしてエラーを返します(本登録が仮登録に戻ることはありません)。
Args:
request_body (Any): Request Body
sign_up_request (SignUpRequest): (required)
Returns:
InlineResponse200
"""
url_path = f"/sign-up"
http_method = "POST"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : ActualWorkingTimeApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def delete_actual_working_time_by_workspace_member(
self, workspace_id: str, workspace_member_id: str, actual_working_time_id: str, **kwargs
) -> Any:
"""実績時間の削除
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
actual_working_time_id (str): 実績稼働時間ID (required)
Returns:
ActualWorkingTime
"""
url_path = (
f"/workspaces/{workspace_id}/members/{workspace_member_id}/actual-working-times/{actual_working_time_id}"
)
http_method = "DELETE"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_actual_working_times(
self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs
) -> Any:
"""ワークスペース全体の実績時間の一括取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
job_id (str): ジョブID
term_start (str): 日付での範囲検索で使用
term_end (str): 日付での範囲検索で使用
Returns:
[ActualWorkingTime]
"""
url_path = f"/workspaces/{workspace_id}/actual-working-times"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_actual_working_times_by_workspace_member(
self, workspace_id: str, workspace_member_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs
) -> Any:
"""ワークスペースメンバーに対する実績時間の一括取得
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
query_params (dict[str, Any]): Query Parameters
term_start (str): 取得する範囲の開始日時。日付での範囲検索で使用
term_end (str): 取得する範囲の終了日時。日付での範囲検索で使用
Returns:
[ActualWorkingTime]
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}/actual-working-times"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_sum_of_actual_working_times(
self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs
) -> Any:
"""ワークスペース全体の実績時間の合計取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
job_id (str): ジョブID
includes_archived_job (bool): アーカイブ化したジョブの合計も含めるかどうか
Returns:
SumOfTimes
"""
url_path = f"/workspaces/{workspace_id}/sum-of-actual-working-times"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_actual_working_time_by_workspace_member(
self,
workspace_id: str,
workspace_member_id: str,
actual_working_time_id: str,
request_body: Optional[Any] = None,
**kwargs,
) -> Any:
"""実績時間の更新
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
actual_working_time_id (str): 実績稼働時間ID (required)
request_body (Any): Request Body
put_actual_working_time_request (PutActualWorkingTimeRequest): (required)
Returns:
ActualWorkingTime
"""
url_path = (
f"/workspaces/{workspace_id}/members/{workspace_member_id}/actual-working-times/{actual_working_time_id}"
)
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : ExpectedWorkingTimeApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def delete_expected_working_time_by_workspace_member(
self, workspace_id: str, workspace_member_id: str, date: str, **kwargs
) -> Any:
"""予定稼働時間の日付指定削除
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
date (str): 予定の対象日 (required)
Returns:
ExpectedWorkingTime
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}/expected-working-times/{date}"
http_method = "DELETE"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_expected_working_times(
self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs
) -> Any:
"""予定稼働時間の一括取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
term_start (str): 日付での範囲検索で使用
term_end (str): 日付での範囲検索で使用
Returns:
[ExpectedWorkingTime]
"""
url_path = f"/workspaces/{workspace_id}/expected-working-times"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_expected_working_times_by_workspace_member(
self, workspace_id: str, workspace_member_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs
) -> Any:
"""予定稼働時間の一覧取得
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
query_params (dict[str, Any]): Query Parameters
term_start (str): 取得する範囲の開始日。日付での範囲検索で使用
term_end (str): 取得する範囲の終了日。日付での範囲検索で使用
Returns:
[ExpectedWorkingTime]
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}/expected-working-times"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_expected_working_time_by_workspace_member(
self, workspace_id: str, workspace_member_id: str, date: str, request_body: Optional[Any] = None, **kwargs
) -> Any:
"""予定稼働時間の日付指定更新
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
date (str): 予定の対象日 (required)
request_body (Any): Request Body
put_expected_working_time_request (PutExpectedWorkingTimeRequest): (required)
Returns:
ExpectedWorkingTime
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}/expected-working-times/{date}"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : JobApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def delete_job(self, workspace_id: str, job_id: str, **kwargs) -> Any:
"""ジョブの削除
Args:
workspace_id (str): ワークスペースID (required)
job_id (str): ジョブID (required)
Returns:
Job
"""
url_path = f"/workspaces/{workspace_id}/jobs/{job_id}"
http_method = "DELETE"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_job(self, workspace_id: str, job_id: str, **kwargs) -> Any:
"""ジョブの取得
Args:
workspace_id (str): ワークスペースID (required)
job_id (str): ジョブID (required)
Returns:
Job
"""
url_path = f"/workspaces/{workspace_id}/jobs/{job_id}"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_job_children(self, workspace_id: str, job_id: str, **kwargs) -> Any:
"""子ジョブの一覧取得
Args:
workspace_id (str): ワークスペースID (required)
job_id (str): ジョブID (required)
Returns:
JobChildren
"""
url_path = f"/workspaces/{workspace_id}/jobs/{job_id}/children"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_jobs(self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs) -> Any:
"""ジョブの一覧取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
sort (str): sort key(複数項目を利用したソートの場合は,(カンマ)区切りで指定してください。key(id or name)、降順にしたい場合は先頭に-(ハイフン)を付ける)
Returns:
[Job]
"""
url_path = f"/workspaces/{workspace_id}/jobs"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_job(self, workspace_id: str, job_id: str, request_body: Optional[Any] = None, **kwargs) -> Any:
"""ジョブの更新
Args:
workspace_id (str): ワークスペースID (required)
job_id (str): ジョブID (required)
request_body (Any): Request Body
put_job_request (PutJobRequest): (required)
Returns:
Job
"""
url_path = f"/workspaces/{workspace_id}/jobs/{job_id}"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : LoginApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def post_login(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""ログイン
Args:
request_body (Any): Request Body
login_request (LoginRequest): (required)
Returns:
LoginToken
"""
url_path = f"/login"
http_method = "POST"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : MyApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def change_password(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""パスワード変更
パスワード変更
Args:
request_body (Any): Request Body
change_password_request (ChangePasswordRequest): (required)
Returns:
InlineResponse200
"""
url_path = f"/my/account/password"
http_method = "POST"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_my_account(self, **kwargs) -> Any:
"""ログイン中のアカウント情報を取得する
Args:
Returns:
Account
"""
url_path = f"/my/account"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_my_schedules(self, query_params: Optional[dict[str, Any]] = None, **kwargs) -> Any:
"""自身がアサインされているスケジュール一覧を取得する
Args:
query_params (dict[str, Any]): Query Parameters
workspace_id (str): ワークスペースIDを指定することで対象のワークスペースでアサインされているスケジュールのみを取得できる
term_start (str): 日付での範囲検索で使用
term_end (str): 日付での範囲検索で使用
Returns:
[Schedule]
"""
url_path = f"/my/schedules"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_my_workspace_members(self, query_params: Optional[dict[str, Any]] = None, **kwargs) -> Any:
"""自身のワークスペースメンバー情報一覧を取得する
Args:
query_params (dict[str, Any]): Query Parameters
workspace_id (str): ワークスペースIDを指定することで対象のワークスペースに所属しているワークスペースメンバー情報のみを取得できる
Returns:
[WorkspaceMember]
"""
url_path = f"/my/workspace-members"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_my_workspaces(self, **kwargs) -> Any:
"""自身の所属するワークスペース情報一覧を取得する
Args:
Returns:
[Workspace]
"""
url_path = f"/my/workspaces"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_my_account(self, request_body: Optional[Any] = None, **kwargs) -> Any:
"""アカウント情報更新
Args:
request_body (Any): Request Body
put_my_account_request (PutMyAccountRequest): (required)
Returns:
Account
"""
url_path = f"/my/account"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : ScheduleApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def delete_schedule(self, workspace_id: str, schedule_id: str, **kwargs) -> Any:
"""作業計画の削除
Args:
workspace_id (str): ワークスペースID (required)
schedule_id (str): スケジュールID (required)
Returns:
Schedule
"""
url_path = f"/workspaces/{workspace_id}/schedules/{schedule_id}"
http_method = "DELETE"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_schedule(self, workspace_id: str, schedule_id: str, **kwargs) -> Any:
"""作業計画の取得
Args:
workspace_id (str): ワークスペースID (required)
schedule_id (str): スケジュールID (required)
Returns:
Schedule
"""
url_path = f"/workspaces/{workspace_id}/schedules/{schedule_id}"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_schedules(self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs) -> Any:
"""作業計画の一覧取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
term_start (str): 日付での範囲検索で使用
term_end (str): 日付での範囲検索で使用
job_id (str): ジョブID
Returns:
[Schedule]
"""
url_path = f"/workspaces/{workspace_id}/schedules"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_sum_of_schedules(self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs) -> Any:
"""ワークスペース全体のスケジュール時間の合計取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
job_id (str): ジョブID
includes_archived_job (bool): アーカイブ化したジョブの合計も含めるかどうか
Returns:
SumOfTimes
"""
url_path = f"/workspaces/{workspace_id}/sum-of-schedules"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_schedule(self, workspace_id: str, schedule_id: str, request_body: Optional[Any] = None, **kwargs) -> Any:
"""作業計画の更新
Args:
workspace_id (str): ワークスペースID (required)
schedule_id (str): スケジュールID (required)
request_body (Any): Request Body
put_schedule_request (PutScheduleRequest): (required)
Returns:
Schedule
"""
url_path = f"/workspaces/{workspace_id}/schedules/{schedule_id}"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Method : WorkspaceApi
# NOTE: This method is auto generated by OpenAPI Generator
#########################################
def get_workspace(self, workspace_id: str, **kwargs) -> Any:
"""ワークスペースの取得
Args:
workspace_id (str): ワークスペースID (required)
Returns:
Workspace
"""
url_path = f"/workspaces/{workspace_id}"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_workspace_tag(self, workspace_id: str, workspace_tag_id: str, **kwargs) -> Any:
"""ワークスペースタグの取得
Args:
workspace_id (str): ワークスペースID (required)
workspace_tag_id (str): ワークスペースタグID (required)
Returns:
WorkspaceTag
"""
url_path = f"/workspaces/{workspace_id}/tags/{workspace_tag_id}"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_workspace_tag_members(self, workspace_id: str, workspace_tag_id: str, **kwargs) -> Any:
"""ワークスペースタグに紐付いているワークスペースメンバーの一覧取得
Args:
workspace_id (str): ワークスペースID (required)
workspace_tag_id (str): ワークスペースタグID (required)
Returns:
WorkspaceTagMembers
"""
url_path = f"/workspaces/{workspace_id}/tags/{workspace_tag_id}/members"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_workspace_tags(self, workspace_id: str, **kwargs) -> Any:
"""ワークスペースタグ一覧の取得
Args:
workspace_id (str): ワークスペースID (required)
Returns:
[WorkspaceTag]
"""
url_path = f"/workspaces/{workspace_id}/tags"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_workspace(self, workspace_id: str, request_body: Optional[Any] = None, **kwargs) -> Any:
"""ワークスペースの更新
Args:
workspace_id (str): ワークスペースID (required)
request_body (Any): Request Body
put_workspace_request (PutWorkspaceRequest): (required)
Returns:
Workspace
"""
url_path = f"/workspaces/{workspace_id}"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_workspace_tag(
self, workspace_id: str, workspace_tag_id: str, request_body: Optional[Any] = None, **kwargs
) -> Any:
"""ワークスペースタグの更新
Args:
workspace_id (str): ワークスペースID (required)
workspace_tag_id (str): ワークスペースタグID (required)
request_body (Any): Request Body
put_workspace_tag_request (PutWorkspaceTagRequest): (required)
Returns:
WorkspaceTag
"""
url_path = f"/workspaces/{workspace_id}/tags/{workspace_tag_id}"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
#########################################
# Public Methods : WorkspaceMemberApi
# NOTE: These methods are auto-generated by OpenAPI Generator
#########################################
def delete_workspace_member(self, workspace_id: str, workspace_member_id: str, **kwargs) -> Any:
"""ワークスペースメンバーの削除
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
Returns:
WorkspaceMember
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}"
http_method = "DELETE"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_workspace_member(self, workspace_id: str, workspace_member_id: str, **kwargs) -> Any:
"""ワークスペースメンバーの取得
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
Returns:
WorkspaceMember
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_workspace_member_tags(self, workspace_id: str, workspace_member_id: str, **kwargs) -> Any:
"""ワークスペースメンバーのタグ一覧取得
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
Returns:
WorkspaceMemberTags
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}/tags"
http_method = "GET"
keyword_params: dict[str, Any] = {}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def get_workspace_members(self, workspace_id: str, query_params: Optional[dict[str, Any]] = None, **kwargs) -> Any:
"""ワークスペースメンバー一覧の取得
Args:
workspace_id (str): ワークスペースID (required)
query_params (dict[str, Any]): Query Parameters
sort (str): sort key(降順にしたい場合は先頭に-(ハイフン)を付ける)
includes_inactive_members (bool): 無効化したワークスペースメンバーも含めるかどうか
Returns:
[WorkspaceMember]
"""
url_path = f"/workspaces/{workspace_id}/members"
http_method = "GET"
keyword_params: dict[str, Any] = {
"query_params": query_params,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
def put_workspace_member(
self, workspace_id: str, workspace_member_id: str, request_body: Optional[Any] = None, **kwargs
) -> Any:
"""ワークスペースメンバーの変更
Args:
workspace_id (str): ワークスペースID (required)
workspace_member_id (str): ワークスペースメンバーID (required)
request_body (Any): Request Body
put_workspace_member_request (PutWorkspaceMemberRequest): (required)
Returns:
WorkspaceMember
"""
url_path = f"/workspaces/{workspace_id}/members/{workspace_member_id}"
http_method = "PUT"
keyword_params: dict[str, Any] = {
"request_body": request_body,
}
keyword_params.update(**kwargs)
return self._request_wrapper(http_method, url_path, **keyword_params)
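# ---------------------------------------------------------------------
# Editorial usage sketch (not part of the generated source): every wrapper
# above just assembles keyword_params and delegates to
# self._request_wrapper(http_method, url_path, **keyword_params).
# `ApiClient` and its constructor arguments are hypothetical stand-ins for
# whatever concrete class defines _request_wrapper.
# ---------------------------------------------------------------------
# client = ApiClient(login_user_id="alice", login_password="secret")  # hypothetical
# schedules = client.get_schedules(
#     "my-workspace-id",
#     query_params={"term_start": "2021-04-01", "term_end": "2021-04-30"},
# )
# member = client.put_workspace_member(
#     "my-workspace-id",
#     "member-id",
#     request_body={"role": "worker"},  # illustrative payload only
# )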
| 27.51349 | 254 | 0.590263 | 3,134 | 31,613 | 5.669432 | 0.080089 | 0.094383 | 0.037708 | 0.048627 | 0.842301 | 0.832452 | 0.809095 | 0.788384 | 0.765928 | 0.720678 | 0 | 0.001373 | 0.285958 | 31,613 | 1,148 | 255 | 27.537456 | 0.785762 | 0.311581 | 0 | 0.674095 | 1 | 0 | 0.126889 | 0.095345 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122563 | false | 0.019499 | 0.013928 | 0 | 0.259053 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b5f3133efae6d5e779655d7a98f8095fd0378013 | 57,866 | py | Python | eventstore_grpc/proto/cluster_pb2.py | jmshnds/eventstore_grpc | cf6a5978bcdf5bb725a569b7373ce15ee4babb25 | ["MIT"] | 6 | 2021-02-04T15:48:28.000Z | 2021-12-26T03:04:26.000Z | eventstore_grpc/proto/cluster_pb2.py | jmshnds/eventstore_grpc | cf6a5978bcdf5bb725a569b7373ce15ee4babb25 | ["MIT"] | 1 | 2021-04-14T00:06:25.000Z | 2021-04-14T07:28:34.000Z | eventstore_grpc/proto/cluster_pb2.py | jmshnds/eventstore_grpc | cf6a5978bcdf5bb725a569b7373ce15ee4babb25 | ["MIT"] | 1 | 2021-04-14T17:22:54.000Z | 2021-04-14T17:22:54.000Z |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cluster.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from eventstore_grpc.proto import shared_pb2 as shared__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='cluster.proto',
package='event_store.cluster',
syntax='proto3',
serialized_options=b'\n%com.eventstore.dbclient.proto.cluster',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\rcluster.proto\x12\x13\x65vent_store.cluster\x1a\x0cshared.proto\"n\n\rGossipRequest\x12.\n\x04info\x18\x01 \x01(\x0b\x32 .event_store.cluster.ClusterInfo\x12-\n\x06server\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\"\x93\x01\n\x11ViewChangeRequest\x12\x32\n\tserver_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x16\n\x0e\x61ttempted_view\x18\x03 \x01(\x05\"\x98\x01\n\x16ViewChangeProofRequest\x12\x32\n\tserver_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x16\n\x0einstalled_view\x18\x03 \x01(\x05\"\x86\x01\n\x0ePrepareRequest\x12\x32\n\tserver_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x0c\n\x04view\x18\x03 \x01(\x05\"\xcf\x03\n\x10PrepareOkRequest\x12\x0c\n\x04view\x18\x01 \x01(\x05\x12\x32\n\tserver_id\x18\x02 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x03 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x14\n\x0c\x65poch_number\x18\x04 \x01(\x05\x12\x16\n\x0e\x65poch_position\x18\x05 \x01(\x03\x12\x31\n\x08\x65poch_id\x18\x06 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x41\n\x18\x65poch_leader_instance_id\x18\x07 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x1c\n\x14last_commit_position\x18\x08 \x01(\x03\x12\x19\n\x11writer_checkpoint\x18\t \x01(\x03\x12\x19\n\x11\x63haser_checkpoint\x18\n \x01(\x03\x12\x15\n\rnode_priority\x18\x0b \x01(\x05\x12\x36\n\x0c\x63luster_info\x18\x0c \x01(\x0b\x32 .event_store.cluster.ClusterInfo\"\xfe\x03\n\x0fProposalRequest\x12\x32\n\tserver_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x32\n\tleader_id\x18\x03 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bleader_http\x18\x04 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x0c\n\x04view\x18\x05 \x01(\x05\x12\x14\n\x0c\x65poch_number\x18\x06 \x01(\x05\x12\x16\n\x0e\x65poch_position\x18\x07 \x01(\x03\x12\x31\n\x08\x65poch_id\x18\x08 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x41\n\x18\x65poch_leader_instance_id\x18\t \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x1c\n\x14last_commit_position\x18\n \x01(\x03\x12\x19\n\x11writer_checkpoint\x18\x0b \x01(\x03\x12\x19\n\x11\x63haser_checkpoint\x18\x0c \x01(\x03\x12\x15\n\rnode_priority\x18\r \x01(\x05\"\xed\x01\n\rAcceptRequest\x12\x32\n\tserver_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x32\n\tleader_id\x18\x03 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bleader_http\x18\x04 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x0c\n\x04view\x18\x05 \x01(\x05\"\x82\x01\n\x18LeaderIsResigningRequest\x12\x32\n\tleader_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bleader_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\"\xec\x01\n\x1aLeaderIsResigningOkRequest\x12\x32\n\tleader_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bleader_http\x18\x02 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x32\n\tserver_id\x18\x03 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x32\n\x0bserver_http\x18\x04 '
b'\x01(\x0b\x32\x1d.event_store.cluster.EndPoint\"?\n\x0b\x43lusterInfo\x12\x30\n\x07members\x18\x01 \x03(\x0b\x32\x1f.event_store.cluster.MemberInfo\")\n\x08\x45ndPoint\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\r\"\x81\x08\n\nMemberInfo\x12\x34\n\x0binstance_id\x18\x01 \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x12\n\ntime_stamp\x18\x02 \x01(\x03\x12\x39\n\x05state\x18\x03 \x01(\x0e\x32*.event_store.cluster.MemberInfo.VNodeState\x12\x10\n\x08is_alive\x18\x04 \x01(\x08\x12\x35\n\x0ehttp_end_point\x18\x05 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x33\n\x0cinternal_tcp\x18\x06 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x33\n\x0c\x65xternal_tcp\x18\x07 \x01(\x0b\x32\x1d.event_store.cluster.EndPoint\x12\x1d\n\x15internal_tcp_uses_tls\x18\x08 \x01(\x08\x12\x1d\n\x15\x65xternal_tcp_uses_tls\x18\t \x01(\x08\x12\x1c\n\x14last_commit_position\x18\n \x01(\x03\x12\x19\n\x11writer_checkpoint\x18\x0b \x01(\x03\x12\x19\n\x11\x63haser_checkpoint\x18\x0c \x01(\x03\x12\x16\n\x0e\x65poch_position\x18\r \x01(\x03\x12\x14\n\x0c\x65poch_number\x18\x0e \x01(\x05\x12\x31\n\x08\x65poch_id\x18\x0f \x01(\x0b\x32\x1f.event_store.client.shared.UUID\x12\x15\n\rnode_priority\x18\x10 \x01(\x05\x12\x1c\n\x14is_read_only_replica\x18\x11 \x01(\x08\x12#\n\x1b\x61\x64vertise_host_to_client_as\x18\x12 \x01(\t\x12(\n advertise_http_port_to_client_as\x18\x13 \x01(\r\x12\'\n\x1f\x61\x64vertise_tcp_port_to_client_as\x18\x14 \x01(\r\"\x9a\x02\n\nVNodeState\x12\x10\n\x0cInitializing\x10\x00\x12\x12\n\x0e\x44iscoverLeader\x10\x01\x12\x0b\n\x07Unknown\x10\x02\x12\x0e\n\nPreReplica\x10\x03\x12\x0e\n\nCatchingUp\x10\x04\x12\t\n\x05\x43lone\x10\x05\x12\x0c\n\x08\x46ollower\x10\x06\x12\r\n\tPreLeader\x10\x07\x12\n\n\x06Leader\x10\x08\x12\x0b\n\x07Manager\x10\t\x12\x10\n\x0cShuttingDown\x10\n\x12\x0c\n\x08Shutdown\x10\x0b\x12\x16\n\x12ReadOnlyLeaderless\x10\x0c\x12\x16\n\x12PreReadOnlyReplica\x10\r\x12\x13\n\x0fReadOnlyReplica\x10\x0e\x12\x13\n\x0fResigningLeader\x10\x0f\x32\xa4\x01\n\x06Gossip\x12N\n\x06Update\x12\".event_store.cluster.GossipRequest\x1a .event_store.cluster.ClusterInfo\x12J\n\x04Read\x12 .event_store.client.shared.Empty\x1a .event_store.cluster.ClusterInfo2\xe1\x05\n\tElections\x12V\n\nViewChange\x12&.event_store.cluster.ViewChangeRequest\x1a .event_store.client.shared.Empty\x12`\n\x0fViewChangeProof\x12+.event_store.cluster.ViewChangeProofRequest\x1a .event_store.client.shared.Empty\x12P\n\x07Prepare\x12#.event_store.cluster.PrepareRequest\x1a .event_store.client.shared.Empty\x12T\n\tPrepareOk\x12%.event_store.cluster.PrepareOkRequest\x1a .event_store.client.shared.Empty\x12R\n\x08Proposal\x12$.event_store.cluster.ProposalRequest\x1a .event_store.client.shared.Empty\x12N\n\x06\x41\x63\x63\x65pt\x12\".event_store.cluster.AcceptRequest\x1a .event_store.client.shared.Empty\x12\x64\n\x11LeaderIsResigning\x12-.event_store.cluster.LeaderIsResigningRequest\x1a .event_store.client.shared.Empty\x12h\n\x13LeaderIsResigningOk\x12/.event_store.cluster.LeaderIsResigningOkRequest\x1a .event_store.client.shared.EmptyB\'\n%com.eventstore.dbclient.proto.clusterb\x06proto3'
,
dependencies=[shared__pb2.DESCRIPTOR,])
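# Editorial note (assumption based on standard protoc output): serialized_pb
# above is the wire-format FileDescriptorProto for cluster.proto, and the
# serialized_start/serialized_end offsets on the Descriptor objects below
# index into that byte string rather than re-encoding each message type.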
_MEMBERINFO_VNODESTATE = _descriptor.EnumDescriptor(
name='VNodeState',
full_name='event_store.cluster.MemberInfo.VNodeState',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Initializing', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DiscoverLeader', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Unknown', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PreReplica', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CatchingUp', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Clone', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Follower', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PreLeader', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Leader', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Manager', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ShuttingDown', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Shutdown', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ReadOnlyLeaderless', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PreReadOnlyReplica', index=13, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ReadOnlyReplica', index=14, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ResigningLeader', index=15, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3049,
serialized_end=3331,
)
_sym_db.RegisterEnumDescriptor(_MEMBERINFO_VNODESTATE)
_GOSSIPREQUEST = _descriptor.Descriptor(
name='GossipRequest',
full_name='event_store.cluster.GossipRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='info', full_name='event_store.cluster.GossipRequest.info', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server', full_name='event_store.cluster.GossipRequest.server', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=52,
serialized_end=162,
)
_VIEWCHANGEREQUEST = _descriptor.Descriptor(
name='ViewChangeRequest',
full_name='event_store.cluster.ViewChangeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.ViewChangeRequest.server_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.ViewChangeRequest.server_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='attempted_view', full_name='event_store.cluster.ViewChangeRequest.attempted_view', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=165,
serialized_end=312,
)
_VIEWCHANGEPROOFREQUEST = _descriptor.Descriptor(
name='ViewChangeProofRequest',
full_name='event_store.cluster.ViewChangeProofRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.ViewChangeProofRequest.server_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.ViewChangeProofRequest.server_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='installed_view', full_name='event_store.cluster.ViewChangeProofRequest.installed_view', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=315,
serialized_end=467,
)
_PREPAREREQUEST = _descriptor.Descriptor(
name='PrepareRequest',
full_name='event_store.cluster.PrepareRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.PrepareRequest.server_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.PrepareRequest.server_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='view', full_name='event_store.cluster.PrepareRequest.view', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=470,
serialized_end=604,
)
_PREPAREOKREQUEST = _descriptor.Descriptor(
name='PrepareOkRequest',
full_name='event_store.cluster.PrepareOkRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='view', full_name='event_store.cluster.PrepareOkRequest.view', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.PrepareOkRequest.server_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.PrepareOkRequest.server_http', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_number', full_name='event_store.cluster.PrepareOkRequest.epoch_number', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_position', full_name='event_store.cluster.PrepareOkRequest.epoch_position', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_id', full_name='event_store.cluster.PrepareOkRequest.epoch_id', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_leader_instance_id', full_name='event_store.cluster.PrepareOkRequest.epoch_leader_instance_id', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_commit_position', full_name='event_store.cluster.PrepareOkRequest.last_commit_position', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='writer_checkpoint', full_name='event_store.cluster.PrepareOkRequest.writer_checkpoint', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='chaser_checkpoint', full_name='event_store.cluster.PrepareOkRequest.chaser_checkpoint', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='node_priority', full_name='event_store.cluster.PrepareOkRequest.node_priority', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cluster_info', full_name='event_store.cluster.PrepareOkRequest.cluster_info', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=607,
serialized_end=1070,
)
_PROPOSALREQUEST = _descriptor.Descriptor(
name='ProposalRequest',
full_name='event_store.cluster.ProposalRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.ProposalRequest.server_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.ProposalRequest.server_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='leader_id', full_name='event_store.cluster.ProposalRequest.leader_id', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='leader_http', full_name='event_store.cluster.ProposalRequest.leader_http', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='view', full_name='event_store.cluster.ProposalRequest.view', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_number', full_name='event_store.cluster.ProposalRequest.epoch_number', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_position', full_name='event_store.cluster.ProposalRequest.epoch_position', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_id', full_name='event_store.cluster.ProposalRequest.epoch_id', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_leader_instance_id', full_name='event_store.cluster.ProposalRequest.epoch_leader_instance_id', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_commit_position', full_name='event_store.cluster.ProposalRequest.last_commit_position', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='writer_checkpoint', full_name='event_store.cluster.ProposalRequest.writer_checkpoint', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='chaser_checkpoint', full_name='event_store.cluster.ProposalRequest.chaser_checkpoint', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='node_priority', full_name='event_store.cluster.ProposalRequest.node_priority', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1073,
serialized_end=1583,
)
_ACCEPTREQUEST = _descriptor.Descriptor(
name='AcceptRequest',
full_name='event_store.cluster.AcceptRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.AcceptRequest.server_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.AcceptRequest.server_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='leader_id', full_name='event_store.cluster.AcceptRequest.leader_id', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='leader_http', full_name='event_store.cluster.AcceptRequest.leader_http', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='view', full_name='event_store.cluster.AcceptRequest.view', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1586,
serialized_end=1823,
)
_LEADERISRESIGNINGREQUEST = _descriptor.Descriptor(
name='LeaderIsResigningRequest',
full_name='event_store.cluster.LeaderIsResigningRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='leader_id', full_name='event_store.cluster.LeaderIsResigningRequest.leader_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='leader_http', full_name='event_store.cluster.LeaderIsResigningRequest.leader_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1826,
serialized_end=1956,
)
_LEADERISRESIGNINGOKREQUEST = _descriptor.Descriptor(
name='LeaderIsResigningOkRequest',
full_name='event_store.cluster.LeaderIsResigningOkRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='leader_id', full_name='event_store.cluster.LeaderIsResigningOkRequest.leader_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='leader_http', full_name='event_store.cluster.LeaderIsResigningOkRequest.leader_http', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_id', full_name='event_store.cluster.LeaderIsResigningOkRequest.server_id', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server_http', full_name='event_store.cluster.LeaderIsResigningOkRequest.server_http', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1959,
serialized_end=2195,
)
_CLUSTERINFO = _descriptor.Descriptor(
name='ClusterInfo',
full_name='event_store.cluster.ClusterInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='members', full_name='event_store.cluster.ClusterInfo.members', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2197,
serialized_end=2260,
)
_ENDPOINT = _descriptor.Descriptor(
name='EndPoint',
full_name='event_store.cluster.EndPoint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='event_store.cluster.EndPoint.address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='port', full_name='event_store.cluster.EndPoint.port', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2262,
serialized_end=2303,
)
_MEMBERINFO = _descriptor.Descriptor(
name='MemberInfo',
full_name='event_store.cluster.MemberInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_id', full_name='event_store.cluster.MemberInfo.instance_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_stamp', full_name='event_store.cluster.MemberInfo.time_stamp', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='state', full_name='event_store.cluster.MemberInfo.state', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_alive', full_name='event_store.cluster.MemberInfo.is_alive', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='http_end_point', full_name='event_store.cluster.MemberInfo.http_end_point', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='internal_tcp', full_name='event_store.cluster.MemberInfo.internal_tcp', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_tcp', full_name='event_store.cluster.MemberInfo.external_tcp', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='internal_tcp_uses_tls', full_name='event_store.cluster.MemberInfo.internal_tcp_uses_tls', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='external_tcp_uses_tls', full_name='event_store.cluster.MemberInfo.external_tcp_uses_tls', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_commit_position', full_name='event_store.cluster.MemberInfo.last_commit_position', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='writer_checkpoint', full_name='event_store.cluster.MemberInfo.writer_checkpoint', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='chaser_checkpoint', full_name='event_store.cluster.MemberInfo.chaser_checkpoint', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_position', full_name='event_store.cluster.MemberInfo.epoch_position', index=12,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_number', full_name='event_store.cluster.MemberInfo.epoch_number', index=13,
number=14, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='epoch_id', full_name='event_store.cluster.MemberInfo.epoch_id', index=14,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='node_priority', full_name='event_store.cluster.MemberInfo.node_priority', index=15,
number=16, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_read_only_replica', full_name='event_store.cluster.MemberInfo.is_read_only_replica', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='advertise_host_to_client_as', full_name='event_store.cluster.MemberInfo.advertise_host_to_client_as', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='advertise_http_port_to_client_as', full_name='event_store.cluster.MemberInfo.advertise_http_port_to_client_as', index=18,
number=19, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='advertise_tcp_port_to_client_as', full_name='event_store.cluster.MemberInfo.advertise_tcp_port_to_client_as', index=19,
number=20, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_MEMBERINFO_VNODESTATE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2306,
serialized_end=3331,
)
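# Editorial note: the assignments below resolve cross-references by name,
# pointing each message-typed field at its Descriptor object (including the
# UUID type imported from shared.proto) before the file descriptor is
# registered with the symbol database.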
_GOSSIPREQUEST.fields_by_name['info'].message_type = _CLUSTERINFO
_GOSSIPREQUEST.fields_by_name['server'].message_type = _ENDPOINT
_VIEWCHANGEREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_VIEWCHANGEREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_VIEWCHANGEPROOFREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_VIEWCHANGEPROOFREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_PREPAREREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_PREPAREREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_PREPAREOKREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_PREPAREOKREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_PREPAREOKREQUEST.fields_by_name['epoch_id'].message_type = shared__pb2._UUID
_PREPAREOKREQUEST.fields_by_name['epoch_leader_instance_id'].message_type = shared__pb2._UUID
_PREPAREOKREQUEST.fields_by_name['cluster_info'].message_type = _CLUSTERINFO
_PROPOSALREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_PROPOSALREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_PROPOSALREQUEST.fields_by_name['leader_id'].message_type = shared__pb2._UUID
_PROPOSALREQUEST.fields_by_name['leader_http'].message_type = _ENDPOINT
_PROPOSALREQUEST.fields_by_name['epoch_id'].message_type = shared__pb2._UUID
_PROPOSALREQUEST.fields_by_name['epoch_leader_instance_id'].message_type = shared__pb2._UUID
_ACCEPTREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_ACCEPTREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_ACCEPTREQUEST.fields_by_name['leader_id'].message_type = shared__pb2._UUID
_ACCEPTREQUEST.fields_by_name['leader_http'].message_type = _ENDPOINT
_LEADERISRESIGNINGREQUEST.fields_by_name['leader_id'].message_type = shared__pb2._UUID
_LEADERISRESIGNINGREQUEST.fields_by_name['leader_http'].message_type = _ENDPOINT
_LEADERISRESIGNINGOKREQUEST.fields_by_name['leader_id'].message_type = shared__pb2._UUID
_LEADERISRESIGNINGOKREQUEST.fields_by_name['leader_http'].message_type = _ENDPOINT
_LEADERISRESIGNINGOKREQUEST.fields_by_name['server_id'].message_type = shared__pb2._UUID
_LEADERISRESIGNINGOKREQUEST.fields_by_name['server_http'].message_type = _ENDPOINT
_CLUSTERINFO.fields_by_name['members'].message_type = _MEMBERINFO
_MEMBERINFO.fields_by_name['instance_id'].message_type = shared__pb2._UUID
_MEMBERINFO.fields_by_name['state'].enum_type = _MEMBERINFO_VNODESTATE
_MEMBERINFO.fields_by_name['http_end_point'].message_type = _ENDPOINT
_MEMBERINFO.fields_by_name['internal_tcp'].message_type = _ENDPOINT
_MEMBERINFO.fields_by_name['external_tcp'].message_type = _ENDPOINT
_MEMBERINFO.fields_by_name['epoch_id'].message_type = shared__pb2._UUID
_MEMBERINFO_VNODESTATE.containing_type = _MEMBERINFO
DESCRIPTOR.message_types_by_name['GossipRequest'] = _GOSSIPREQUEST
DESCRIPTOR.message_types_by_name['ViewChangeRequest'] = _VIEWCHANGEREQUEST
DESCRIPTOR.message_types_by_name['ViewChangeProofRequest'] = _VIEWCHANGEPROOFREQUEST
DESCRIPTOR.message_types_by_name['PrepareRequest'] = _PREPAREREQUEST
DESCRIPTOR.message_types_by_name['PrepareOkRequest'] = _PREPAREOKREQUEST
DESCRIPTOR.message_types_by_name['ProposalRequest'] = _PROPOSALREQUEST
DESCRIPTOR.message_types_by_name['AcceptRequest'] = _ACCEPTREQUEST
DESCRIPTOR.message_types_by_name['LeaderIsResigningRequest'] = _LEADERISRESIGNINGREQUEST
DESCRIPTOR.message_types_by_name['LeaderIsResigningOkRequest'] = _LEADERISRESIGNINGOKREQUEST
DESCRIPTOR.message_types_by_name['ClusterInfo'] = _CLUSTERINFO
DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
DESCRIPTOR.message_types_by_name['MemberInfo'] = _MEMBERINFO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GossipRequest = _reflection.GeneratedProtocolMessageType('GossipRequest', (_message.Message,), {
'DESCRIPTOR' : _GOSSIPREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.GossipRequest)
})
_sym_db.RegisterMessage(GossipRequest)
ViewChangeRequest = _reflection.GeneratedProtocolMessageType('ViewChangeRequest', (_message.Message,), {
'DESCRIPTOR' : _VIEWCHANGEREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.ViewChangeRequest)
})
_sym_db.RegisterMessage(ViewChangeRequest)
ViewChangeProofRequest = _reflection.GeneratedProtocolMessageType('ViewChangeProofRequest', (_message.Message,), {
'DESCRIPTOR' : _VIEWCHANGEPROOFREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.ViewChangeProofRequest)
})
_sym_db.RegisterMessage(ViewChangeProofRequest)
PrepareRequest = _reflection.GeneratedProtocolMessageType('PrepareRequest', (_message.Message,), {
'DESCRIPTOR' : _PREPAREREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.PrepareRequest)
})
_sym_db.RegisterMessage(PrepareRequest)
PrepareOkRequest = _reflection.GeneratedProtocolMessageType('PrepareOkRequest', (_message.Message,), {
'DESCRIPTOR' : _PREPAREOKREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.PrepareOkRequest)
})
_sym_db.RegisterMessage(PrepareOkRequest)
ProposalRequest = _reflection.GeneratedProtocolMessageType('ProposalRequest', (_message.Message,), {
'DESCRIPTOR' : _PROPOSALREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.ProposalRequest)
})
_sym_db.RegisterMessage(ProposalRequest)
AcceptRequest = _reflection.GeneratedProtocolMessageType('AcceptRequest', (_message.Message,), {
'DESCRIPTOR' : _ACCEPTREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.AcceptRequest)
})
_sym_db.RegisterMessage(AcceptRequest)
LeaderIsResigningRequest = _reflection.GeneratedProtocolMessageType('LeaderIsResigningRequest', (_message.Message,), {
'DESCRIPTOR' : _LEADERISRESIGNINGREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.LeaderIsResigningRequest)
})
_sym_db.RegisterMessage(LeaderIsResigningRequest)
LeaderIsResigningOkRequest = _reflection.GeneratedProtocolMessageType('LeaderIsResigningOkRequest', (_message.Message,), {
'DESCRIPTOR' : _LEADERISRESIGNINGOKREQUEST,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.LeaderIsResigningOkRequest)
})
_sym_db.RegisterMessage(LeaderIsResigningOkRequest)
ClusterInfo = _reflection.GeneratedProtocolMessageType('ClusterInfo', (_message.Message,), {
'DESCRIPTOR' : _CLUSTERINFO,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.ClusterInfo)
})
_sym_db.RegisterMessage(ClusterInfo)
EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
'DESCRIPTOR' : _ENDPOINT,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.EndPoint)
})
_sym_db.RegisterMessage(EndPoint)
MemberInfo = _reflection.GeneratedProtocolMessageType('MemberInfo', (_message.Message,), {
'DESCRIPTOR' : _MEMBERINFO,
'__module__' : 'cluster_pb2'
# @@protoc_insertion_point(class_scope:event_store.cluster.MemberInfo)
})
_sym_db.RegisterMessage(MemberInfo)
DESCRIPTOR._options = None
_GOSSIP = _descriptor.ServiceDescriptor(
name='Gossip',
full_name='event_store.cluster.Gossip',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=3334,
serialized_end=3498,
methods=[
_descriptor.MethodDescriptor(
name='Update',
full_name='event_store.cluster.Gossip.Update',
index=0,
containing_service=None,
input_type=_GOSSIPREQUEST,
output_type=_CLUSTERINFO,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Read',
full_name='event_store.cluster.Gossip.Read',
index=1,
containing_service=None,
input_type=shared__pb2._EMPTY,
output_type=_CLUSTERINFO,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_GOSSIP)
DESCRIPTOR.services_by_name['Gossip'] = _GOSSIP
_ELECTIONS = _descriptor.ServiceDescriptor(
name='Elections',
full_name='event_store.cluster.Elections',
file=DESCRIPTOR,
index=1,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=3501,
serialized_end=4238,
methods=[
_descriptor.MethodDescriptor(
name='ViewChange',
full_name='event_store.cluster.Elections.ViewChange',
index=0,
containing_service=None,
input_type=_VIEWCHANGEREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ViewChangeProof',
full_name='event_store.cluster.Elections.ViewChangeProof',
index=1,
containing_service=None,
input_type=_VIEWCHANGEPROOFREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Prepare',
full_name='event_store.cluster.Elections.Prepare',
index=2,
containing_service=None,
input_type=_PREPAREREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='PrepareOk',
full_name='event_store.cluster.Elections.PrepareOk',
index=3,
containing_service=None,
input_type=_PREPAREOKREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Proposal',
full_name='event_store.cluster.Elections.Proposal',
index=4,
containing_service=None,
input_type=_PROPOSALREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Accept',
full_name='event_store.cluster.Elections.Accept',
index=5,
containing_service=None,
input_type=_ACCEPTREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='LeaderIsResigning',
full_name='event_store.cluster.Elections.LeaderIsResigning',
index=6,
containing_service=None,
input_type=_LEADERISRESIGNINGREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='LeaderIsResigningOk',
full_name='event_store.cluster.Elections.LeaderIsResigningOk',
index=7,
containing_service=None,
input_type=_LEADERISRESIGNINGOKREQUEST,
output_type=shared__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_ELECTIONS)
DESCRIPTOR.services_by_name['Elections'] = _ELECTIONS
# @@protoc_insertion_point(module_scope)
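# A hedged usage sketch (not part of the generated file): service descriptors
# like _GOSSIP above are normally consumed through the companion *_grpc module
# that protoc emits next to this one; cluster_pb2_grpc, shared_pb2 and the
# target address below are assumptions for illustration only.
#
#   import grpc
#   import shared_pb2
#   import cluster_pb2_grpc
#   channel = grpc.insecure_channel('localhost:2113')  # address is hypothetical
#   gossip = cluster_pb2_grpc.GossipStub(channel)
#   cluster_info = gossip.Read(shared_pb2.Empty())     # Gossip.Read takes the shared Empty message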
| 48.832068
| 6,611
| 0.769917
| 7,412
| 57,866
| 5.659066
| 0.04884
| 0.045584
| 0.087424
| 0.072094
| 0.807414
| 0.775992
| 0.734319
| 0.702324
| 0.687853
| 0.678126
| 0
| 0.038574
| 0.116096
| 57,866
| 1,184
| 6,612
| 48.873311
| 0.781497
| 0.019061
| 0
| 0.690027
| 1
| 0.002695
| 0.219506
| 0.17883
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004492
| 0
| 0.004492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1fd820d71a81e5de6f82bf5b95925fc768bc1332
| 5,749
|
py
|
Python
|
tests/test_SVD.py
|
HyunwooJe/REC4824
|
c67aeedb88ff166fc5e0ef2259d86273a073b28b
|
[
"BSD-3-Clause"
] | 1
|
2017-03-14T14:12:53.000Z
|
2017-03-14T14:12:53.000Z
|
tests/test_SVD.py
|
HyunwooJe/REC4824
|
c67aeedb88ff166fc5e0ef2259d86273a073b28b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_SVD.py
|
HyunwooJe/REC4824
|
c67aeedb88ff166fc5e0ef2259d86273a073b28b
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Module for testing the SVD and SVD++ algorithms.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
from surprise import SVD
from surprise import SVDpp
from surprise import Dataset
from surprise import Reader
from surprise import evaluate
# the test and train files are from the ml-100k dataset (10% of u1.base and
# 10% of u1.test)
train_file = os.path.join(os.path.dirname(__file__), './u1_ml100k_train')
test_file = os.path.join(os.path.dirname(__file__), './u1_ml100k_test')
data = Dataset.load_from_folds([(train_file, test_file)], Reader('ml-100k'))
def test_SVD_parameters():
"""Ensure that all parameters are taken into account."""
# The baseline against which to compare.
algo = SVD(n_factors=1, n_epochs=1)
rmse_default = evaluate(algo, data, measures=['rmse'])['rmse']
# n_factors
algo = SVD(n_factors=2, n_epochs=1)
rmse_factors = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_factors
# n_epochs
algo = SVD(n_factors=1, n_epochs=2)
rmse_n_epochs = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_n_epochs
# biased
algo = SVD(n_factors=1, n_epochs=1, biased=False)
rmse_biased = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_biased
# lr_all
algo = SVD(n_factors=1, n_epochs=1, lr_all=5)
rmse_lr_all = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_all
# reg_all
algo = SVD(n_factors=1, n_epochs=1, reg_all=5)
rmse_reg_all = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_all
# lr_bu
algo = SVD(n_factors=1, n_epochs=1, lr_bu=5)
rmse_lr_bu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_bu
# lr_bi
algo = SVD(n_factors=1, n_epochs=1, lr_bi=5)
rmse_lr_bi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_bi
# lr_pu
algo = SVD(n_factors=1, n_epochs=1, lr_pu=5)
rmse_lr_pu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_pu
# lr_qi
algo = SVD(n_factors=1, n_epochs=1, lr_qi=5)
rmse_lr_qi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_qi
# reg_bu
algo = SVD(n_factors=1, n_epochs=1, reg_bu=5)
rmse_reg_bu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_bu
# reg_bi
algo = SVD(n_factors=1, n_epochs=1, reg_bi=5)
rmse_reg_bi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_bi
# reg_pu
algo = SVD(n_factors=1, n_epochs=1, reg_pu=5)
rmse_reg_pu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_pu
# reg_qi
algo = SVD(n_factors=1, n_epochs=1, reg_qi=5)
rmse_reg_qi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_qi
def test_SVDpp_parameters():
"""Ensure that all parameters are taken into account."""
# The baseline against which to compare.
algo = SVDpp(n_factors=1, n_epochs=1)
rmse_default = evaluate(algo, data, measures=['rmse'])['rmse']
# n_factors
algo = SVDpp(n_factors=2, n_epochs=1)
rmse_factors = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_factors
# The rest is OK but just takes too long to run for now, so it stays disabled below...
"""
# n_epochs
algo = SVDpp(n_factors=1, n_epochs=2)
rmse_n_epochs = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_n_epochs
# lr_all
algo = SVDpp(n_factors=1, n_epochs=1, lr_all=5)
rmse_lr_all = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_all
# reg_all
algo = SVDpp(n_factors=1, n_epochs=1, reg_all=5)
rmse_reg_all = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_all
# lr_bu
algo = SVDpp(n_factors=1, n_epochs=1, lr_bu=5)
rmse_lr_bu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_bu
# lr_bi
algo = SVDpp(n_factors=1, n_epochs=1, lr_bi=5)
rmse_lr_bi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_bi
# lr_pu
algo = SVDpp(n_factors=1, n_epochs=1, lr_pu=5)
rmse_lr_pu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_pu
# lr_qi
algo = SVDpp(n_factors=1, n_epochs=1, lr_qi=5)
rmse_lr_qi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_qi
# lr_yj
algo = SVDpp(n_factors=1, n_epochs=1, lr_yj=5)
rmse_lr_yj = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_lr_yj
# reg_bu
algo = SVDpp(n_factors=1, n_epochs=1, reg_bu=5)
rmse_reg_bu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_bu
# reg_bi
algo = SVDpp(n_factors=1, n_epochs=1, reg_bi=5)
rmse_reg_bi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_bi
# reg_pu
algo = SVDpp(n_factors=1, n_epochs=1, reg_pu=5)
rmse_reg_pu = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_pu
# reg_qi
algo = SVDpp(n_factors=1, n_epochs=1, reg_qi=5)
rmse_reg_qi = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_qi
# reg_yj
algo = SVDpp(n_factors=1, n_epochs=1, reg_yj=5)
rmse_reg_yj = evaluate(algo, data, measures=['rmse'])['rmse']
assert rmse_default != rmse_reg_yj
"""
| 32.664773
| 76
| 0.667594
| 911
| 5,749
| 3.920966
| 0.090011
| 0.068589
| 0.129899
| 0.194849
| 0.854703
| 0.854703
| 0.854703
| 0.850224
| 0.841265
| 0.794513
| 0
| 0.021529
| 0.192033
| 5,749
| 175
| 77
| 32.851429
| 0.74747
| 0.082275
| 0
| 0.101695
| 0
| 0
| 0.055519
| 0
| 0
| 0
| 0
| 0
| 0.237288
| 1
| 0.033898
| false
| 0
| 0.118644
| 0
| 0.152542
| 0.016949
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2f8adb71e3814e9bf3626f8b2f3f10a3827888f7
| 1,548
|
py
|
Python
|
backend/tests/stores/genericstoretests/generic_store_attendees_tests.py
|
fjacob21/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | null | null | null |
backend/tests/stores/genericstoretests/generic_store_attendees_tests.py
|
fjacob21/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | 88
|
2016-11-12T14:54:38.000Z
|
2018-08-02T00:25:07.000Z
|
backend/tests/stores/genericstoretests/generic_store_attendees_tests.py
|
mididecouverte/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | null | null | null |
def test_attendees(attendees):
attendees.add('user', 'event')
assert len(attendees.get_all('event')) == 1
attendee = attendees.get_all('event')[0]
assert attendee
assert attendee['user_id'] == 'user'
assert attendee['event_id'] == 'event'
attendees.delete('user', 'event')
attendees.delete('user', 'event')
attendees.add('user', 'event')
attendees.reset()
assert len(attendees.get_all('event')) == 0
attendees.reset()
attendees.add('user', 'event')
attendees.clean()
assert len(attendees.get_all('event')) == 0
attendees.clean()
attendees.add('user', 'event')
assert len(attendees.get_all('event')) == 1
attendee = attendees.get_all('event')[0]
assert attendee
assert attendee['user_id'] == 'user'
assert attendee['event_id'] == 'event'
assert attendee['present'] == False
assert attendee['present_time'] == ''
attendees.update('user', 'event', True)
attendee = attendees.get_all('event')[0]
assert attendee
print(attendee)
assert attendee['user_id'] == 'user'
assert attendee['event_id'] == 'event'
assert attendee['present'] == True
assert attendee['present_time'] != ''
attendees.update('user', 'event', False)
attendee = attendees.get_all('event')[0]
assert attendee
assert attendee['user_id'] == 'user'
assert attendee['event_id'] == 'event'
assert attendee['present'] == False
assert attendee['present_time'] == ''
attendees.clean()
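# A hedged sketch of a minimal in-memory store that would pass the test above,
# together with the pytest fixture the test expects; the real store
# implementations live elsewhere in the repo, so every name here is
# illustrative only.
import datetime

import pytest


class InMemoryAttendees(object):
    def __init__(self):
        self._items = []

    def add(self, user_id, event_id):
        self._items.append({'user_id': user_id, 'event_id': event_id,
                            'present': False, 'present_time': ''})

    def get_all(self, event_id):
        return [a for a in self._items if a['event_id'] == event_id]

    def delete(self, user_id, event_id):
        # Deleting a missing attendee is a no-op, as the double delete above expects.
        self._items = [a for a in self._items
                       if (a['user_id'], a['event_id']) != (user_id, event_id)]

    def update(self, user_id, event_id, present):
        for a in self._items:
            if (a['user_id'], a['event_id']) == (user_id, event_id):
                a['present'] = present
                a['present_time'] = datetime.datetime.now().isoformat() if present else ''

    def reset(self):
        self._items = []

    def clean(self):  # the test treats clean() like reset(); assumed equivalent here
        self._items = []


@pytest.fixture
def attendees():
    return InMemoryAttendees()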
| 33.652174
| 48
| 0.620801
| 174
| 1,548
| 5.408046
| 0.132184
| 0.2678
| 0.127524
| 0.170032
| 0.927736
| 0.873539
| 0.812965
| 0.812965
| 0.616366
| 0.616366
| 0
| 0.006552
| 0.21124
| 1,548
| 45
| 49
| 34.4
| 0.764128
| 0
| 0
| 0.853659
| 0
| 0
| 0.176431
| 0
| 0
| 0
| 0
| 0
| 0.536585
| 1
| 0.02439
| false
| 0
| 0
| 0
| 0.02439
| 0.02439
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c0dc4d343daf104349bc9b41d3ffe54d3463e6b0
| 2,011
|
py
|
Python
|
pegasus/eval/rouge_tensors.py
|
jacob-parnell-rozetta/pegasus
|
ae08e41b32b1429e9f24b8a3b97dbb4d17bd2546
|
[
"Apache-2.0"
] | null | null | null |
pegasus/eval/rouge_tensors.py
|
jacob-parnell-rozetta/pegasus
|
ae08e41b32b1429e9f24b8a3b97dbb4d17bd2546
|
[
"Apache-2.0"
] | null | null | null |
pegasus/eval/rouge_tensors.py
|
jacob-parnell-rozetta/pegasus
|
ae08e41b32b1429e9f24b8a3b97dbb4d17bd2546
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
from rouge_score import rouge_scorer
def evaluate_r1(tensor1, tensor2, variant=2):
_ROUGE_METRIC = "rouge1"
scorer = rouge_scorer.RougeScorer([_ROUGE_METRIC], use_stemmer=True)
tensor1 = tensor1.numpy().decode("utf-8") # decodes the target tensor
tensor2 = tensor2.numpy().decode("utf-8") # decodes the pred tensor
r_score = scorer.score(tensor1, tensor2) # calculates the rouge scores
r_score_f1 = {el: 0 for el in list(r_score.keys())} # set empty rouge dict
r_score_f1.update({_ROUGE_METRIC: float(list(r_score[_ROUGE_METRIC])[variant])})
tensor_result = tf.Variable(r_score_f1[_ROUGE_METRIC], shape=()).read_value()
return tensor_result
def evaluate_r2(tensor1, tensor2, variant=2):
_ROUGE_METRIC = "rouge2"
scorer = rouge_scorer.RougeScorer([_ROUGE_METRIC], use_stemmer=True)
tensor1 = tensor1.numpy().decode("utf-8") # decodes the target tensor
tensor2 = tensor2.numpy().decode("utf-8") # decodes the pred tensor
r_score = scorer.score(tensor1, tensor2) # calculates the rouge scores
r_score_f1 = {el: 0 for el in list(r_score.keys())} # set empty rouge dict
r_score_f1.update({_ROUGE_METRIC: float(list(r_score[_ROUGE_METRIC])[variant])})
tensor_result = tf.Variable(r_score_f1[_ROUGE_METRIC], shape=()).read_value()
return tensor_result
def evaluate_rl(tensor1, tensor2, variant=2):
_ROUGE_METRIC = "rougeL"
scorer = rouge_scorer.RougeScorer([_ROUGE_METRIC], use_stemmer=True)
tensor1 = tensor1.numpy().decode("utf-8") # decodes the target tensor
tensor2 = tensor2.numpy().decode("utf-8") # decodes the pred tensor
r_score = scorer.score(tensor1, tensor2) # calculates the rouge scores
r_score_f1 = {el: 0 for el in list(r_score.keys())} # set empty rouge dict
r_score_f1.update({_ROUGE_METRIC: float(list(r_score[_ROUGE_METRIC])[variant])})
tensor_result = tf.Variable(r_score_f1[_ROUGE_METRIC], shape=()).read_value()
return tensor_result
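# A hedged usage sketch: these helpers call .numpy() on their inputs, so in
# graph mode they would typically be wrapped in tf.py_function; the sentence
# pair below is illustrative only.
target = tf.constant("the cat sat on the mat")
prediction = tf.constant("the cat lay on the mat")
r1 = tf.py_function(evaluate_r1, inp=[target, prediction], Tout=tf.float32)
print(float(r1))  # ROUGE-1 score at index `variant` (default 2 = F-measure)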
| 39.431373
| 84
| 0.72004
| 289
| 2,011
| 4.743945
| 0.190311
| 0.078775
| 0.052516
| 0.065646
| 0.937272
| 0.937272
| 0.865062
| 0.865062
| 0.865062
| 0.865062
| 0
| 0.028926
| 0.157633
| 2,011
| 50
| 85
| 40.22
| 0.780401
| 0.14719
| 0
| 0.75
| 0
| 0
| 0.028202
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09375
| false
| 0
| 0.0625
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f1d6bafd232b82612e9964e30b24cb8940cc0919
| 14,834
|
py
|
Python
|
geoloc-server/ppstore/traindata.py
|
muzammilar/passport
|
7918561916fbcb5e82cd73d577873fb17a819d19
|
[
"BSD-3-Clause"
] | 1
|
2021-12-06T01:32:56.000Z
|
2021-12-06T01:32:56.000Z
|
geoloc-server/ppstore/traindata.py
|
muzammilar/passport
|
7918561916fbcb5e82cd73d577873fb17a819d19
|
[
"BSD-3-Clause"
] | null | null | null |
geoloc-server/ppstore/traindata.py
|
muzammilar/passport
|
7918561916fbcb5e82cd73d577873fb17a819d19
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
ppstore.traindata
~~~~~~
This module has been developed to be used as a wrapper to read the ground
truth training data, as well as to read the data stored in the local database
from multiple geolocation services. This is the primary module for reading the
training dataset for the classifiers and for reading the cached information
about an IP address (hostname, country predicted by geolocation services,
WhoIS information, etc.) from the database.
:author: Muzammil Abdul Rehman
:copyright: Northeastern University © 2018.
:license: Custom BSD, see LICENSE for more details.
:email: passport@ccs.neu.edu
"""
import json
import urllib2
import requests
###remove-me-later-muz######remove-me-later-muz###import ormsettings as DJANGO_SETTINGS # don't remove
###remove-me-later-muz######remove-me-later-muz###from django.db import models
###remove-me-later-muz###from django.db.models import Q
from django.db.models import Count as databaseCount
from ppstore.models import DDEC_Hostname
from ppstore.models import Hints_DDEC_Location_Lat_Long
from ppstore.models import IP_WHOIS_INFORMATION
from ppstore.models import Hints_AS_INFO
from ppstore.models import Loc_Source_DB_IP
from ppstore.models import Loc_Source_EUREKAPI
from ppstore.models import Loc_Source_IP2LOCATION
from ppstore.models import Loc_Source_IPINFO_IO
from ppstore.models import Loc_Source_MAXMIND_GEOLITE_CITY
from ppstore.models import CLASSIFIER_DATA_TRAIN
from ppstore.models import CLASSIFIER_DATA_TRAIN_OPENIPMAP
from ppstore.models import CLASSIFIER_DATA_TRAIN_GROUND
from ppmeasurements.util import is_private_ip
# Change this code to connect to the database directly.
# This function either gets data from the database or from the server, and returns a Python object.
def to_string(x):
if x is None:
return ''
return str(x.encode('unicode-escape'))
def to_int(x):
if x is None:
return -1
return x
# Legacy function. It has been deprecated.
def get_training_data_all():
cls_hosts = CLASSIFIER_DATA_TRAIN.objects.all()
training_data = []
for cur_host in cls_hosts:
cur_train_instance = {'ip': to_string(cur_host.ip),
'asn': to_int(cur_host.asn),
'asn_registry': to_string(cur_host.asn_registry),
'isp': to_string(cur_host.isp),
'isp_city': to_string(cur_host.isp_city),
'isp_region': to_string(cur_host.isp_region),
'DDECCountry': to_string(cur_host.DDECcountry),
'ASCountry': to_string(cur_host.AScountry),
'ISPCountry': to_string(cur_host.ISPcountry),
'db_ip_country': to_string(cur_host.db_ip_country),
'eurekapi_country': to_string(cur_host.eurekapi_country),
'ip2location_country': to_string(cur_host.ip2location_country),
'ipinfo_country': to_string(cur_host.ipinfo_country),
'maxmind_country': to_string(cur_host.maxmind_country),
'as_name': to_string(cur_host.as_name),
'num_as_in_org': to_int(cur_host.num_as_in_org),
'num_ipv4_prefix_in_org': to_int(cur_host.num_ipv4_prefix_in_org),
'num_ipv4_ip_in_org': to_int(cur_host.num_ipv4_ip_in_org),
'realcountry': to_string(cur_host.realcountry)}
#cur_train_instance['asn_cidr_bgp'] = getString(cur_host.asn_cidr_bgp)
#cur_train_instance['hostname'] = getString(cur_host.hostname)
training_data.append(cur_train_instance)
return training_data
def get_training_data_all_openipmap():
return get_training_data_all_table(CLASSIFIER_DATA_TRAIN_OPENIPMAP)
def get_training_data_all_ground():
return get_training_data_all_table(CLASSIFIER_DATA_TRAIN_GROUND)
def get_training_data_all_table(TABLE_NAME):
cls_hosts = TABLE_NAME.objects.all()
training_data = []
for cur_host in cls_hosts:
cur_train_instance = {'ip': to_string(cur_host.ip),
'asn': to_int(cur_host.asn),
'asn_registry': to_string(cur_host.asn_registry),
'isp': to_string(cur_host.isp),
'isp_city': to_string(cur_host.isp_city),
'isp_region': to_string(cur_host.isp_region),
'DDECCountry': to_string(cur_host.DDECcountry),
'ASCountry': to_string(cur_host.AScountry),
'ISPCountry': to_string(cur_host.ISPcountry),
'db_ip_country': to_string(cur_host.db_ip_country),
'eurekapi_country': to_string(cur_host.eurekapi_country),
'ip2location_country': to_string(cur_host.ip2location_country),
'ipinfo_country': to_string(cur_host.ipinfo_country),
'maxmind_country': to_string(cur_host.maxmind_country),
'as_name': to_string(cur_host.as_name),
'num_as_in_org': to_int(cur_host.num_as_in_org),
'num_ipv4_prefix_in_org': to_int(cur_host.num_ipv4_prefix_in_org),
'num_ipv4_ip_in_org': to_int(cur_host.num_ipv4_ip_in_org),
'realcountry': to_string(cur_host.realcountry)}
#cur_train_instance['asn_cidr_bgp'] = getString(cur_host.asn_cidr_bgp)
#cur_train_instance['hostname'] = getString(cur_host.hostname)
training_data.append(cur_train_instance)
return training_data
def get_training_data_country(country_info):
country = country_info["country"]
cls_hosts = CLASSIFIER_DATA_TRAIN.objects.filter(realcountry__iexact=country)
training_data = []
for cur_host in cls_hosts:
cur_train_instance = {'ip': to_string(cur_host.ip),
'asn': to_int(cur_host.asn),
'asn_registry': to_string(cur_host.asn_registry),
'isp': to_string(cur_host.isp),
'isp_city': to_string(cur_host.isp_city),
'isp_region': to_string(cur_host.isp_region),
'DDECCountry': to_string(cur_host.DDECcountry),
'ASCountry': to_string(cur_host.AScountry),
'ISPCountry': to_string(cur_host.ISPcountry),
'db_ip_country': to_string(cur_host.db_ip_country),
'eurekapi_country': to_string(cur_host.eurekapi_country),
'ip2location_country': to_string(cur_host.ip2location_country),
'ipinfo_country': to_string(cur_host.ipinfo_country),
'maxmind_country': to_string(cur_host.maxmind_country),
'as_name': to_string(cur_host.as_name),
'num_as_in_org': to_int(cur_host.num_as_in_org),
'num_ipv4_prefix_in_org': to_int(cur_host.num_ipv4_prefix_in_org),
'num_ipv4_ip_in_org': to_int(cur_host.num_ipv4_ip_in_org),
'realcountry': to_string(cur_host.realcountry)}
#cur_train_instance['asn_cidr_bgp'] = getString(cur_host.asn_cidr_bgp)
#cur_train_instance['hostname'] = getString(cur_host.hostname)
training_data.append(cur_train_instance)
return training_data
def get_all_countries():
country_count_hsts = CLASSIFIER_DATA_TRAIN.objects.all().values(
'realcountry').annotate(total=databaseCount('realcountry'))
country_count_list = []
for cntry in country_count_hsts:
if cntry['realcountry'] == '':
continue
cc_dict = {"country": cntry['realcountry'],
"count": cntry['total']}
country_count_list.append(cc_dict)
#print "Total Train Countries: ", len(country_count_list)
return country_count_list
def get_ground_truth_all():
cls_hosts = CLASSIFIER_DATA_TRAIN.objects.all()
real_dict = {}
for cur_host in cls_hosts:
ip_addr = to_string(cur_host.ip)
if is_private_ip(ip_addr):
continue
realcountry = to_string(cur_host.realcountry)
real_dict[ip_addr] = realcountry
return real_dict
def get_all_test_data():
# this function is incomplete and doesn't work
testing_data = []
all_hsts = []
for cur_hst in all_hsts:
ip_str = cur_hst.ip
hst_nm = cur_hst.hostname
cur_train_instance = {}
cur_train_instance['ip'] = ip_str
try:
host_objs = DDEC_Hostname.objects.filter(hostname=hst_nm)
loc = host_objs[0].location
x = Hints_DDEC_Location_Lat_Long.objects.filter(location=loc)
cur_train_instance['DDECCountry'] = to_string(x.country)
except:
cur_train_instance['DDECCountry'] = to_string('')
try:
cur_train_instance['db_ip_country'] = Loc_Source_DB_IP.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['db_ip_country'] = ''
try:
cur_train_instance['ipinfo_country'] = Loc_Source_IPINFO_IO.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['ipinfo_country'] = ''
try:
cur_train_instance['eurekapi_country'] = Loc_Source_EUREKAPI.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['eurekapi_country'] = ''
try:
cur_train_instance['ip2location_country'] = Loc_Source_IP2LOCATION.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['ip2location_country'] = ''
try:
cur_train_instance['maxmind_country'] = Loc_Source_MAXMIND_GEOLITE_CITY.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['maxmind_country'] = ''
asn_num = -1
try:
ip_object = IP_WHOIS_INFORMATION.objects.filter(ip=ip_str)[0]
asn_num = ip_object.asn
cur_train_instance['asn'] = ip_object.asn
cur_train_instance['asn_registry'] = ip_object.asn_registry
cur_train_instance['isp'] = ip_object.isp
cur_train_instance['isp_city'] = ip_object.isp_city
cur_train_instance['isp_region'] = ip_object.isp_region
cur_train_instance['ISPCountry'] = ip_object.isp_country
cur_train_instance['ASCountry'] = ip_object.asn_country
except:
cur_train_instance['asn_registry'] = ''
cur_train_instance['isp'] = ''
cur_train_instance['isp_city'] = ''
cur_train_instance['isp_region'] = ''
cur_train_instance['ISPCountry'] = ''
cur_train_instance['ASCountry'] = ''
cur_train_instance['asn'] = -1
try:
asn_object = Hints_AS_INFO.objects.filter(as_number=asn_num)[0]
cur_train_instance['as_name'] = asn_object.as_name
cur_train_instance['num_as_in_org'] = asn_object.num_as_in_org
cur_train_instance['num_ipv4_prefix_in_org'] = asn_object.num_ipv4_prefix_in_org
cur_train_instance['num_ipv4_ip_in_org'] = asn_object.num_ipv4_ip_in_org
except:
cur_train_instance['as_name'] = ''
cur_train_instance['num_as_in_org'] = -1
cur_train_instance['num_ipv4_prefix_in_org'] = -1
cur_train_instance['num_ipv4_ip_in_org'] = -1
testing_data.append(cur_train_instance)
return testing_data
def get_test_data(ip_str,host_name=''):
hst_nm = host_name
cur_train_instance = {}
cur_train_instance['ip'] = ip_str
try:
host_objs = DDEC_Hostname.objects.filter(hostname=hst_nm)
loc = host_objs[0].location
x = Hints_DDEC_Location_Lat_Long.objects.filter(location=loc)
cur_train_instance['DDECCountry'] = to_string(x.country)
except:
cur_train_instance['DDECCountry'] = to_string('')
try:
cur_train_instance['db_ip_country'] = Loc_Source_DB_IP.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['db_ip_country'] = ''
try:
cur_train_instance['ipinfo_country'] = Loc_Source_IPINFO_IO.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['ipinfo_country'] = ''
try:
cur_train_instance['eurekapi_country'] = Loc_Source_EUREKAPI.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['eurekapi_country'] = ''
try:
cur_train_instance['ip2location_country'] = Loc_Source_IP2LOCATION.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['ip2location_country'] = ''
try:
cur_train_instance['maxmind_country'] = Loc_Source_MAXMIND_GEOLITE_CITY.objects.filter(ip=ip_str)[0].country
except:
cur_train_instance['maxmind_country'] = ''
asn_num = -1
try:
ip_object = IP_WHOIS_INFORMATION.objects.filter(ip=ip_str)[0]
asn_num = ip_object.asn
cur_train_instance['asn'] = ip_object.asn
cur_train_instance['asn_registry'] = ip_object.asn_registry
cur_train_instance['isp'] = ip_object.isp
cur_train_instance['isp_city'] = ip_object.isp_city
cur_train_instance['isp_region'] = ip_object.isp_region
cur_train_instance['ISPCountry'] = ip_object.isp_country
cur_train_instance['ASCountry'] = ip_object.asn_country
except:
cur_train_instance['asn_registry'] = ''
cur_train_instance['isp'] = ''
cur_train_instance['isp_city'] = ''
cur_train_instance['isp_region'] = ''
cur_train_instance['ISPCountry'] = ''
cur_train_instance['ASCountry'] = ''
cur_train_instance['asn'] = -1
try:
asn_object = Hints_AS_INFO.objects.filter(as_number=asn_num)[0]
cur_train_instance['as_name'] = asn_object.as_name
cur_train_instance['num_as_in_org'] = asn_object.num_as_in_org
cur_train_instance['num_ipv4_prefix_in_org'] = asn_object.num_ipv4_prefix_in_org
cur_train_instance['num_ipv4_ip_in_org'] = asn_object.num_ipv4_ip_in_org
except:
cur_train_instance['as_name'] = ''
cur_train_instance['num_as_in_org'] = -1
cur_train_instance['num_ipv4_prefix_in_org'] = -1
cur_train_instance['num_ipv4_ip_in_org'] = -1
return cur_train_instance
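# A hedged refactoring sketch: the repeated try/except blocks in
# get_all_test_data() and get_test_data() all follow the same "country of the
# first matching row, else ''" pattern, so a small helper could condense them;
# the model classes are the ones imported at the top of this module.
def _country_or_blank(model, ip_str):
    try:
        return model.objects.filter(ip=ip_str)[0].country
    except Exception:
        return ''

# e.g. cur_train_instance['db_ip_country'] = _country_or_blank(Loc_Source_DB_IP, ip_str)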
| 46.647799
| 120
| 0.641499
| 1,876
| 14,834
| 4.642857
| 0.101812
| 0.07899
| 0.157979
| 0.080941
| 0.806085
| 0.778186
| 0.742021
| 0.722962
| 0.707578
| 0.696556
| 0
| 0.00641
| 0.263853
| 14,834
| 317
| 121
| 46.794953
| 0.791117
| 0.101186
| 0
| 0.760784
| 0
| 0
| 0.116829
| 0.011638
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043137
| false
| 0
| 0.066667
| 0.007843
| 0.160784
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7b322d131745ecf224d447498b9182919225bd06
| 14,941
|
py
|
Python
|
dvfile/data.py
|
oxygen-dioxide/dvfile
|
a8ab1434deac1838e662381b6f89ffda3b5f6bf9
|
[
"MulanPSL-1.0"
] | 2
|
2020-10-15T13:53:42.000Z
|
2021-06-07T05:46:19.000Z
|
dvfile/data.py
|
oxygen-dioxide/dvfile
|
a8ab1434deac1838e662381b6f89ffda3b5f6bf9
|
[
"MulanPSL-1.0"
] | null | null | null |
dvfile/data.py
|
oxygen-dioxide/dvfile
|
a8ab1434deac1838e662381b6f89ffda3b5f6bf9
|
[
"MulanPSL-1.0"
] | null | null | null |
#data2: a chunk of fixed data in the dv file whose meaning is unknown; used when writing the file
data2=b'\x04\x10\x00\x00\x00\x04\x00\x00\x07\xefR\xb4y\xca\xc6\xbb\xd0\xd8F\xbcI,\x95\xbc\x98\xf1\xc6\xbc\xac\xbb\xf8\xbc\xeeD\x15\xbd\x81-.\xbd\xbb\x17G\xbd\x07\x03`\xbd\x15\xefx\xbd\xc9\xed\x88\xbd\x16d\x95\xbdG\xda\xa1\xbd6P\xae\xbd\xb8\xc5\xba\xbd\xa6:\xc7\xbd\xd6\xae\xd3\xbd#"\xe0\xbdb\x94\xec\xbdn\x05\xf9\xbd\x8e\xba\x02\xbe\x95\xf1\x08\xbe\xd5\'\x0f\xbe-]\x15\xbe\x88\x91\x1b\xbe\xd4\xc4!\xbe\xfd\xf6\'\xbe\xef\'.\xbe\x98W4\xbe\xe6\x85:\xbe\xc3\xb2@\xbe\x1f\xdeF\xbe\xe6\x07M\xbe\x050S\xbeZVY\xbe\xf3z_\xbe\xad\x9de\xbes\xbek\xbe7\xddq\xbe\xe6\xf9w\xbej\x14~\xbe[\x16\x82\xbe[!\x85\xbe++\x88\xbe\xc53\x8b\xbe\x1d;\x8e\xbe+A\x91\xbe\xe8E\x94\xbeII\x97\xbe?K\x9a\xbe\xd0K\x9d\xbe\xedJ\xa0\xbe\x8cH\xa3\xbe\xa4D\xa6\xbe/?\xa9\xbe"8\xac\xbew/\xaf\xbe$%\xb2\xbe \x19\xb5\xbee\x0b\xb8\xbe\xe9\xfb\xba\xbe\x9d\xea\xbd\xbe\x89\xd7\xc0\xbe\x9a\xc2\xc3\xbe\xcc\xab\xc6\xbe\x15\x93\xc9\xbelx\xcc\xbe\xcb[\xcf\xbe)=\xd2\xbe\x7f\x1c\xd5\xbe\xc4\xf9\xd7\xbe\xf1\xd4\xda\xbe\xff\xad\xdd\xbe\xe5\x84\xe0\xbe\x9bY\xe3\xbe\x1d,\xe6\xbeW\xfc\xe8\xbeU\xca\xeb\xbe\x04\x96\xee\xbe^_\xf1\xbe\\&\xf4\xbe\xf9\xea\xf6\xbe)\xad\xf9\xbe\xe7l\xfc\xbe,*\xff\xbez\xf2\x00\xbf\x98N\x02\xbfp\xa9\x03\xbf\xf9\x02\x05\xbf8[\x06\xbf%\xb2\x07\xbf\xbd\x07\t\xbf\xfc[\n\xbf\xde\xae\x0b\xbfb\x00\r\xbf\x83P\x0e\xbf=\x9f\x0f\xbf\x8e\xec\x10\xbfr8\x12\xbf\xe6\x82\x13\xbf\xe6\xcb\x14\xbfo\x13\x16\xbf\x80Y\x17\xbf\x12\x9e\x18\xbf$\xe1\x19\xbf\xb0"\x1b\xbf\xb8b\x1c\xbf7\xa1\x1d\xbf(\xde\x1e\xbf\x8a\x19 \xbfXS!\xbf\x91\x8b"\xbf0\xc2#\xbf4\xf7$\xbf\x98*&\xbfX\\\'\xbfu\x8c(\xbf\xea\xba)\xbf\xb4\xe7*\xbf\xd1\x12,\xbf><-\xbf\xf7c.\xbf\xfa\x89/\xbfD\xae0\xbf\xd3\xd01\xbf\xa3\xf12\xbf\xb1\x104\xbf\xfc-5\xbf\x80I6\xbf;c7\xbf+{8\xbfK\x919\xbf\x9a\xa5:\xbf\x13\xb8;\xbf\xb9\xc8<\xbf\x85\xd7=\xbfv\xe4>\xbf\x8a\xef?\xbf\xbd\xf8@\xbf\r\x00B\xbfx\x05C\xbf\xfc\x08D\xbf\x95\nE\xbf?\nF\xbf\xfe\x07G\xbf\xcc\x03H\xbf\xa5\xfdH\xbf\x89\xf5I\xbfu\xebJ\xbfg\xdfK\xbf^\xd1L\xbfU\xc1M\xbfK\xafN\xbf>\x9bO\xbf,\x85P\xbf\x12mQ\xbf\xefRR\xbf\xc26S\xbf\x87\x18T\xbf<\xf8T\xbf\xdd\xd5U\xbfl\xb1V\xbf\xe7\x8aW\xbfKbX\xbf\x947Y\xbf\xc2\nZ\xbf\xd3\xdbZ\xbf\xc5\xaa[\xbf\x95w\\\xbfBB]\xbf\xcc\n^\xbf/\xd1^\xbfj\x95_\xbf{W`\xbf`\x17a\xbf\x1a\xd5a\xbf\xa3\x90b\xbf\xfbIc\xbf!\x01d\xbf\x14\xb6d\xbf\xd0he\xbfV\x19f\xbf\xa3\xc7f\xbf\xb6sg\xbf\x8e\x1dh\xbf(\xc5h\xbf\x82ji\xbf\x9d\rj\xbfw\xaej\xbf\rMk\xbf`\xe9k\xbfl\x83l\xbf1\x1bm\xbf\xae\xb0m\xbf\xe2Cn\xbf\xca\xd4n\xbffco\xbf\xb4\xefo\xbf\xb4yp\xbfd\x01q\xbf\xc2\x86q\xbf\xcf\tr\xbf\x88\x8ar\xbf\xed\x08s\xbf\xfb\x84s\xbf\xb3\xfes\xbf\x13vt\xbf\x1a\xebt\xbf\xc8]u\xbf\x1b\xceu\xbf\x13<v\xbf\xac\xa7v\xbf\xe9\x10w\xbf\xc7ww\xbfE\xdcw\xbfc>x\xbf!\x9ex\xbf{\xfbx\xbfsVy\xbf\x08\xafy\xbf8\x05z\xbf\x03Yz\xbfj\xaaz\xbfh\xf9z\xbf\xffE{\xbf.\x90{\xbf\xf5\xd7{\xbfS\x1d|\xbfI`|\xbf\xd3\xa0|\xbf\xf2\xde|\xbf\xa5\x1a}\xbf\xedS}\xbf\xc9\x8a}\xbf8\xbf}\xbf9\xf1}\xbf\xcd ~\xbf\xf2M~\xbf\xa9x~\xbf\xf1\xa0~\xbf\xca\xc6~\xbf3\xea~\xbf,\x0b\x7f\xbf\xb6)\x7f\xbf\xceE\x7f\xbfv_\x7f\xbf\xaev\x7f\xbft\x8b\x7f\xbf\xc9\x9d\x7f\xbf\xac\xad\x7f\xbf\x1e\xbb\x7f\xbf\x1f\xc6\x7f\xbf\xad\xce\x7f\xbf\xca\xd4\x7f\xbfu\xd8\x7f\xbf\xae\xd9\x7f\xbfu\xd8\x7f\xbf\xca\xd4\x7f\xbf\xad\xce\x7f\xbf\x1f\xc6\x7f\xbf\x1e\xbb\x7f\xbf\xac\xad\x7f\xbf\xc9\x9d\x7f\xbft\x8b\x7f\xbf\xaev\x7f\xbfv_\x7f\xbf\xceE\x7f\xbf\xb6)\x7f\xbf,\x0b\x7f\xbf3\xea~\xbf\xca\xc6~\xbf\xf1\xa0~\xbf\xa9x~\xbf\xf2M~\xbf\xcd 
~\xbf9\xf1}\xbf8\xbf}\xbf\xc9\x8a}\xbf\xeeS}\xbf\xa6\x1a}\xbf\xf2\xde|\xbf\xd3\xa0|\xbfI`|\xbfU\x1d|\xbf\xf7\xd7{\xbf0\x90{\xbf\x00F{\xbfh\xf9z\xbfj\xaaz\xbf\x04Yz\xbf9\x05z\xbf\t\xafy\xbfuVy\xbf|\xfbx\xbf!\x9ex\xbfd>x\xbfF\xdcw\xbf\xc8ww\xbf\xea\x10w\xbf\xad\xa7v\xbf\x13<v\xbf\x1c\xceu\xbf\xc9]u\xbf\x1c\xebt\xbf\x14vt\xbf\xb4\xfes\xbf\xfc\x84s\xbf\xed\x08s\xbf\x89\x8ar\xbf\xd0\tr\xbf\xc4\x86q\xbfe\x01q\xbf\xb5yp\xbf\xb5\xefo\xbfgco\xbf\xcb\xd4n\xbf\xe3Cn\xbf\xb0\xb0m\xbf3\x1bm\xbfm\x83l\xbfa\xe9k\xbf\x10Mk\xbfx\xaej\xbf\x9f\rj\xbf\x83ji\xbf)\xc5h\xbf\x8f\x1dh\xbf\xb8sg\xbf\xa6\xc7f\xbfW\x19f\xbf\xd2he\xbf\x15\xb6d\xbf#\x01d\xbf\xfcIc\xbf\xa4\x90b\xbf\x1b\xd5a\xbfc\x17a\xbf}W`\xbfl\x95_\xbf2\xd1^\xbf\xcf\n^\xbfFB]\xbf\x98w\\\xbf\xc7\xaa[\xbf\xd4\xdbZ\xbf\xc3\nZ\xbf\x957Y\xbfLbX\xbf\xe9\x8aW\xbfo\xb1V\xbf\xdf\xd5U\xbf<\xf8T\xbf\x88\x18T\xbf\xc46S\xbf\xf2RR\xbf\x14mQ\xbf.\x85P\xbf@\x9bO\xbfM\xafN\xbfV\xc1M\xbf_\xd1L\xbfj\xdfK\xbfx\xebJ\xbf\x8c\xf5I\xbf\xa8\xfdH\xbf\xce\x03H\xbf\x03\x08G\xbfD\nF\xbf\x97\nE\xbf\xfe\x08D\xbfz\x05C\xbf\x10\x00B\xbf\xbf\xf8@\xbf\x8b\xef?\xbfx\xe4>\xbf\x87\xd7=\xbf\xbb\xc8<\xbf\x16\xb8;\xbf\x9a\xa5:\xbfJ\x919\xbf-{8\xbf>c7\xbf\x82I6\xbf\xfe-5\xbf\xb4\x104\xbf\xa5\xf12\xbf\xd5\xd01\xbfF\xae0\xbf\xfc\x89/\xbf\xfac.\xbfA<-\xbf\xd4\x12,\xbf\xb8\xe7*\xbf\xec\xba)\xbfz\x8c(\xbf]\\\'\xbf\x9b*&\xbf6\xf7$\xbf3\xc2#\xbf\x93\x8b"\xbf[S!\xbf\x8d\x19 \xbf*\xde\x1e\xbf9\xa1\x1d\xbf\xbbb\x1c\xbf\xb3"\x1b\xbf#\xe1\x19\xbf\x11\x9e\x18\xbf\x82Y\x17\xbfr\x13\x16\xbf\xe9\xcb\x14\xbf\xe8\x82\x13\xbft8\x12\xbf\x91\xec\x10\xbf@\x9f\x0f\xbf\x86P\x0e\xbfe\x00\r\xbf\xe1\xae\x0b\xbf\xff[\n\xbf\xc0\x07\t\xbf\'\xb2\x07\xbf;[\x06\xbf\x00\x03\x05\xbfs\xa9\x03\xbf\x9bN\x02\xbf|\xf2\x00\xbf2*\xff\xbe\xedl\xfc\xbe/\xad\xf9\xbe\xff\xea\xf6\xbeb&\xf4\xbed_\xf1\xbe\n\x96\xee\xbe[\xca\xeb\xbe]\xfc\xe8\xbe#,\xe6\xbe\xa1Y\xe3\xbe\xeb\x84\xe0\xbe\x05\xae\xdd\xbe\xf7\xd4\xda\xbe\xca\xf9\xd7\xbe\x85\x1c\xd5\xbe/=\xd2\xbe\xd1[\xcf\xberx\xcc\xbe\x1b\x93\xc9\xbe\xd2\xab\xc6\xbe\xa1\xc2\xc3\xbe\x8e\xd7\xc0\xbe\xab\xea\xbd\xbe\xf0\xfb\xba\xbek\x0b\xb8\xbe\'\x19\xb5\xbe*%\xb2\xbe}/\xaf\xbe)8\xac\xbe6?\xa9\xbe\xabD\xa6\xbe\x91H\xa3\xbe\xf3J\xa0\xbe\xd6K\x9d\xbeFK\x9a\xbeHI\x97\xbe\xeeE\x94\xbe2A\x91\xbe$;\x8e\xbe\xcb3\x8b\xbe2+\x88\xbeb!\x85\xbeb\x16\x82\xbey\x14~\xbe\xf3\xf9w\xbeD\xddq\xbe\x81\xbek\xbe\xb9\x9de\xbe\xffz_\xbehVY\xbe\x130S\xbe\xf4\x07M\xbe,\xdeF\xbe\xd1\xb2@\xbe\xf3\x85:\xbe\xa5W4\xbe\xfc\'.\xbe\n\xf7\'\xbe\xe1\xc4!\xbe\x95\x91\x1b\xbe:]\x15\xbe\xe3\'\x0f\xbe\xa1\xf1\x08\xbe\x8c\xba\x02\xbe\x88\x05\xf9\xbd}\x94\xec\xbd>"\xe0\xbd\xf1\xae\xd3\xbd\xc0:\xc7\xbd\xd2\xc5\xba\xbdPP\xae\xbdb\xda\xa1\xbd1d\x95\xbd\xe4\xed\x88\xbdK\xefx\xbd=\x03`\xbd\xef\x17G\xbd\xb6-.\xbd#E\x15\xbd\x16\xbc\xf8\xbc\x02\xf2\xc6\xbc\xb3,\x95\xbc\xa4\xd9F\xbc \xcc\xc6\xbb\x00\x00\x00\x00 
\xcc\xc6;\xa4\xd9F<\xb3,\x95<\x02\xf2\xc6<\x16\xbc\xf8<#E\x15=\xb6-.=\xef\x17G==\x03`=K\xefx=\xe4\xed\x88=1d\x95=b\xda\xa1=PP\xae=\xd2\xc5\xba=\xc0:\xc7=\xf1\xae\xd3=>"\xe0=}\x94\xec=\x88\x05\xf9=\x8c\xba\x02>\xa1\xf1\x08>\xe3\'\x0f>:]\x15>\x95\x91\x1b>\xe1\xc4!>\n\xf7\'>\xfc\'.>\xa5W4>\xf3\x85:>\xd1\xb2@>,\xdeF>\xf4\x07M>\x130S>hVY>\xffz_>\xb9\x9de>\x81\xbek>D\xddq>\xf3\xf9w>y\x14~>b\x16\x82>b!\x85>2+\x88>\xcb3\x8b>$;\x8e>2A\x91>\xeeE\x94>HI\x97>FK\x9a>\xd6K\x9d>\xf3J\xa0>\x91H\xa3>\xabD\xa6>6?\xa9>)8\xac>}/\xaf>*%\xb2>\'\x19\xb5>k\x0b\xb8>\xf0\xfb\xba>\xab\xea\xbd>\x8e\xd7\xc0>\xa1\xc2\xc3>\xd2\xab\xc6>\x1b\x93\xc9>rx\xcc>\xd1[\xcf>/=\xd2>\x85\x1c\xd5>\xca\xf9\xd7>\xf7\xd4\xda>\x05\xae\xdd>\xeb\x84\xe0>\xa1Y\xe3>#,\xe6>]\xfc\xe8>[\xca\xeb>\n\x96\xee>d_\xf1>b&\xf4>\xff\xea\xf6>/\xad\xf9>\xedl\xfc>2*\xff>|\xf2\x00?\x9bN\x02?s\xa9\x03?\x00\x03\x05?;[\x06?\'\xb2\x07?\xc0\x07\t?\xff[\n?\xe1\xae\x0b?e\x00\r?\x86P\x0e?@\x9f\x0f?\x91\xec\x10?t8\x12?\xe8\x82\x13?\xe9\xcb\x14?r\x13\x16?\x82Y\x17?\x11\x9e\x18?#\xe1\x19?\xb3"\x1b?\xbbb\x1c?9\xa1\x1d?*\xde\x1e?\x8d\x19 ?[S!?\x93\x8b"?3\xc2#?6\xf7$?\x9b*&?]\\\'?z\x8c(?\xec\xba)?\xb8\xe7*?\xd4\x12,?A<-?\xfac.?\xfc\x89/?F\xae0?\xd5\xd01?\xa5\xf12?\xb4\x104?\xfe-5?\x82I6?>c7?-{8?J\x919?\x9a\xa5:?\x16\xb8;?\xbb\xc8<?\x87\xd7=?x\xe4>?\x8b\xef??\xbf\xf8@?\x0c\x00B?x\x05C?\xfc\x08D?\x95\nE?B\nF?\x00\x08G?\xce\x03H?\xa8\xfdH?\x8c\xf5I?x\xebJ?j\xdfK?_\xd1L?V\xc1M?M\xafN?@\x9bO?.\x85P?\x14mQ?\xf2RR?\xc46S?\x88\x18T?=\xf8T?\xe1\xd5U?q\xb1V?\xeb\x8aW?NbX?\x977Y?\xc6\nZ?\xd3\xdbZ?\xc5\xaa[?\x95w\\?CB]?\xcd\n^?0\xd1^?l\x95_?}W`?c\x17a?\x1b\xd5a?\xa4\x90b?\xfcIc?#\x01d?\x15\xb6d?\xd2he?W\x19f?\xa6\xc7f?\xb8sg?\x8f\x1dh?)\xc5h?\x85ji?\xa0\rj?y\xaej?\x10Mk?b\xe9k?n\x83l?4\x1bm?\xae\xb0m?\xe2Cn?\xca\xd4n?fco?\xb4\xefo?\xb4yp?e\x01q?\xc4\x86q?\xd0\tr?\x89\x8ar?\xed\x08s?\xfc\x84s?\xb4\xfes?\x14vt?\x1c\xebt?\xc9]u?\x1c\xceu?\x13<v?\xad\xa7v?\xea\x10w?\xc8ww?F\xdcw?f>x?#\x9ex?~\xfbx?vVy?\n\xafy?;\x05z?\x03Yz?h\xaaz?g\xf9z?\x00F{?0\x90{?\xf7\xd7{?U\x1d|?I`|?\xd3\xa0|?\xf2\xde|?\xa6\x1a}?\xeeS}?\xc9\x8a}?8\xbf}?9\xf1}?\xcd ~?\xf2M~?\xa9x~?\xf1\xa0~?\xca\xc6~?3\xea~?,\x0b\x7f?\xb6)\x7f?\xceE\x7f?v_\x7f?\xaev\x7f?t\x8b\x7f?\xc9\x9d\x7f?\xac\xad\x7f?\x1e\xbb\x7f?\x1f\xc6\x7f?\xad\xce\x7f?\xca\xd4\x7f?u\xd8\x7f?\xae\xd9\x7f?u\xd8\x7f?\xca\xd4\x7f?\xad\xce\x7f?\x1f\xc6\x7f?\x1e\xbb\x7f?\xac\xad\x7f?\xc9\x9d\x7f?t\x8b\x7f?\xaev\x7f?v_\x7f?\xceE\x7f?\xb6)\x7f?,\x0b\x7f?2\xea~?\xc9\xc6~?\xf0\xa0~?\xa8x~?\xf1M~?\xcc ~?8\xf1}?8\xbf}?\xc9\x8a}?\xeeS}?\xa6\x1a}?\xf2\xde|?\xd3\xa0|?I`|?U\x1d|?\xf7\xd7{?0\x90{?\x00F{?h\xf9z?j\xaaz?\x03Yz?8\x05z?\x08\xafy?sVy?{\xfbx? 
\x9ex?c>x?E\xdcw?\xc7ww?\xe9\x10w?\xac\xa7v?\x12<v?\x1b\xceu?\xc7]u?\x19\xebt?\x14vt?\xb4\xfes?\xfc\x84s?\xed\x08s?\x89\x8ar?\xd0\tr?\xc4\x86q?e\x01q?\xb5yp?\xb5\xefo?fco?\xca\xd4n?\xe2Cn?\xae\xb0m?1\x1bm?l\x83l?`\xe9k?\rMk?w\xaej?\x9d\rj?\x81ji?&\xc5h?\x8c\x1dh?\xb5sg?\xa2\xc7f?U\x19f?\xcfhe?\x13\xb6d?#\x01d?\xfcIc?\xa4\x90b?\x1b\xd5a?c\x17a?}W`?l\x95_?1\xd1^?\xce\n^?CB]?\x95w\\?\xc5\xaa[?\xd3\xdbZ?\xc2\nZ?\x947Y?KbX?\xe7\x8aW?l\xb1V?\xdd\xd5U?9\xf8T?\x85\x18T?\xc06S?\xedRR?\x10mQ?)\x85P?;\x9bO?H\xafN?W\xc1M?`\xd1L?j\xdfK?x\xebJ?\x8c\xf5I?\xa8\xfdH?\xce\x03H?\x01\x08G?C\nF?\x95\nE?\xfc\x08D?x\x05C?\r\x00B?\xbd\xf8@?\x8a\xef??v\xe4>?\x85\xd7=?\xb9\xc8<?\x13\xb8;?\x98\xa5:?H\x919?\'{8?8c7?~I6?\xfa-5?\xae\x104?\xa0\xf12?\xd0\xd01?G\xae0?\xfd\x89/?\xfbc.?A<-?\xd4\x12,?\xb8\xe7*?\xed\xba)?x\x8c(?Z\\\'?\x98*&?4\xf7$?0\xc2#?\x91\x8b"?XS!?\x8a\x19 ?(\xde\x1e?7\xa1\x1d?\xb8b\x1c?\xb0"\x1b?!\xe1\x19?\x0f\x9e\x18?|Y\x17?l\x13\x16?\xe3\xcb\x14?\xe3\x82\x13?n8\x12?\x8a\xec\x10?:\x9f\x0f?\x86P\x0e?f\x00\r?\xe2\xae\x0b?\xff[\n?\xc0\x07\t?)\xb2\x07?<[\x06?\xfc\x02\x05?p\xa9\x03?\x98N\x02?z\xf2\x00?,*\xff>\xe7l\xfc>)\xad\xf9>\xf9\xea\xf6>\\&\xf4>^_\xf1>\x04\x96\xee>U\xca\xeb>W\xfc\xe8>\x15,\xe6>\x95Y\xe3>\xde\x84\xe0>\xf7\xad\xdd>\xea\xd4\xda>\xbd\xf9\xd7>x\x1c\xd5>0=\xd2>\xd2[\xcf>tx\xcc>\x1b\x93\xc9>\xd4\xab\xc6>\xa2\xc2\xc3>\x90\xd7\xc0>\xa5\xea\xbd>\xe9\xfb\xba>e\x0b\xb8> \x19\xb5>$%\xb2>w/\xaf>"8\xac>/?\xa9>\xa4D\xa6>\x8cH\xa3>\xedJ\xa0>\xd0K\x9d>?K\x9a>AI\x97>\xdfE\x94>$A\x91>\x15;\x8e>\xbd3\x8b>$+\x88>S!\x85>T\x16\x82>{\x14~>\xf4\xf9w>H\xddq>\x83\xbek>\xbb\x9de>\x03{_>jVY>\x050S>\xe6\x07M>\x1f\xdeF>\xc3\xb2@>\xe6\x85:>\x98W4>\xef\'.>\xfd\xf6\'>\xd4\xc4!>\x88\x91\x1b>-]\x15>\xd5\'\x0f>\x95\xf1\x08>~\xba\x02>N\x05\xf9=B\x94\xec=\x04"\xe0=\xb6\xae\xd3=\x86:\xc7=\x99\xc5\xba=\x16P\xae=g\xda\xa1=5d\x95=\xe9\xed\x88=V\xefx=H\x03`=\xfa\x17G=\xc1-.=\xeeD\x15=\xac\xbb\xf8<\x98\xf1\xc6<I,\x95<\xd0\xd8F<y\xca\xc6;'
#balancewrite: converts the left/right channel balance value to binary for writing to the file
balancewrite={-50: b'\x00\x00\x80\xbf', -49: b'H\xe1z\xbf', -48: b'\x8f\xc2u\xbf', -47: b'\xd7\xa3p\xbf', -46: b'\x1f\x85k\xbf', -45: b'fff\xbf', -44: b'\xaeGa\xbf', -43: b'\xf6(\\\xbf', -42: b'=\nW\xbf', -41: b'\x85\xebQ\xbf', -40: b'\xcd\xccL\xbf', -39: b'\x14\xaeG\xbf', -38: b'\\\x8fB\xbf', -37: b'\xa4p=\xbf', -36: b'\xecQ8\xbf', -35: b'333\xbf', -34: b'{\x14.\xbf', -33: b'\xc3\xf5(\xbf', -32: b'\n\xd7#\xbf', -31: b'R\xb8\x1e\xbf', -30: b'\x9a\x99\x19\xbf', -29: b'\xe1z\x14\xbf', -28: b')\\\x0f\xbf', -27: b'q=\n\xbf', -26: b'\xb8\x1e\x05\xbf', -25: b'\x00\x00\x00\xbf', -24: b'\x8f\xc2\xf5\xbe', -23: b'\x1f\x85\xeb\xbe', -22: b'\xaeG\xe1\xbe', -21: b'=\n\xd7\xbe', -20: b'\xcd\xcc\xcc\xbe', -19: b'\\\x8f\xc2\xbe', -18: b'\xecQ\xb8\xbe', -17: b'{\x14\xae\xbe', -16: b'\n\xd7\xa3\xbe', -15: b'\x9a\x99\x99\xbe', -14: b')\\\x8f\xbe', -13: b'\xb8\x1e\x85\xbe', -12: b'\x8f\xc2u\xbe', -11: b'\xaeGa\xbe', -10: b'\xcd\xccL\xbe', -9: b'\xecQ8\xbe', -8: b'\n\xd7#\xbe', -7: b')\\\x0f\xbe', -6: b'\x8f\xc2\xf5\xbd', -5: b'\xcd\xcc\xcc\xbd', -4: b'\n\xd7\xa3\xbd', -3: b'\x8f\xc2u\xbd', -2: b'\n\xd7#\xbd', -1: b'\n\xd7\xa3\xbc', 0: b'\x00\x00\x00\x00', 1: b'\n\xd7\xa3<', 2: b'\n\xd7#=', 3: b'\x8f\xc2u=', 4: b'\n\xd7\xa3=', 5: b'\xcd\xcc\xcc=', 6: b'\x8f\xc2\xf5=', 7: b')\\\x0f>', 8: b'\n\xd7#>', 9: b'\xecQ8>', 10: b'\xcd\xccL>', 11: b'\xaeGa>', 12: b'\x8f\xc2u>', 13: b'\xb8\x1e\x85>', 14: b')\\\x8f>', 15: b'\x9a\x99\x99>', 16: b'\n\xd7\xa3>', 17: b'{\x14\xae>', 18: b'\xecQ\xb8>', 19: b'\\\x8f\xc2>', 20: b'\xcd\xcc\xcc>', 21: b'=\n\xd7>', 22: b'\xaeG\xe1>', 23: b'\x1f\x85\xeb>', 24: b'\x8f\xc2\xf5>', 25: b'\x00\x00\x00?', 26: b'\xb8\x1e\x05?', 27: b'q=\n?', 28: b')\\\x0f?', 29: b'\xe1z\x14?', 30: b'\x9a\x99\x19?', 31: b'R\xb8\x1e?', 32: b'\n\xd7#?', 33: b'\xc3\xf5(?', 34: b'{\x14.?', 35: b'333?', 36: b'\xecQ8?', 37: b'\xa4p=?', 38: b'\\\x8fB?', 39: b'\x14\xaeG?', 40: b'\xcd\xccL?', 41: b'\x85\xebQ?', 42: b'=\nW?', 43: b'\xf6(\\?', 44: b'\xaeGa?', 45: b'fff?', 46: b'\x1f\x85k?', 47: b'\xd7\xa3p?', 48: b'\x8f\xc2u?', 49: b'H\xe1z?', 50: b'\x00\x00\x80?'}
#balanceread: parses the binary left/right channel balance data read from the file
balanceread={b'\x00\x00\x80\xbf': -50, b'H\xe1z\xbf': -49, b'\x8f\xc2u\xbf': -48, b'\xd7\xa3p\xbf': -47, b'\x1f\x85k\xbf': -46, b'fff\xbf': -45, b'\xaeGa\xbf': -44, b'\xf6(\\\xbf': -43, b'=\nW\xbf': -42, b'\x85\xebQ\xbf': -41, b'\xcd\xccL\xbf': -40, b'\x14\xaeG\xbf': -39, b'\\\x8fB\xbf': -38, b'\xa4p=\xbf': -37, b'\xecQ8\xbf': -36, b'333\xbf': -35, b'{\x14.\xbf': -34, b'\xc3\xf5(\xbf': -33, b'\n\xd7#\xbf': -32, b'R\xb8\x1e\xbf': -31, b'\x9a\x99\x19\xbf': -30, b'\xe1z\x14\xbf': -29, b')\\\x0f\xbf': -28, b'q=\n\xbf': -27, b'\xb8\x1e\x05\xbf': -26, b'\x00\x00\x00\xbf': -25, b'\x8f\xc2\xf5\xbe': -24, b'\x1f\x85\xeb\xbe': -23, b'\xaeG\xe1\xbe': -22, b'=\n\xd7\xbe': -21, b'\xcd\xcc\xcc\xbe': -20, b'\\\x8f\xc2\xbe': -19, b'\xecQ\xb8\xbe': -18, b'{\x14\xae\xbe': -17, b'\n\xd7\xa3\xbe': -16, b'\x9a\x99\x99\xbe': -15, b')\\\x8f\xbe': -14, b'\xb8\x1e\x85\xbe': -13, b'\x8f\xc2u\xbe': -12, b'\xaeGa\xbe': -11, b'\xcd\xccL\xbe': -10, b'\xecQ8\xbe': -9, b'\n\xd7#\xbe': -8, b')\\\x0f\xbe': -7, b'\x8f\xc2\xf5\xbd': -6, b'\xcd\xcc\xcc\xbd': -5, b'\n\xd7\xa3\xbd': -4, b'\x8f\xc2u\xbd': -3, b'\n\xd7#\xbd': -2, b'\n\xd7\xa3\xbc': -1, b'\x00\x00\x00\x00': 0, b'\n\xd7\xa3<': 1, b'\n\xd7#=': 2, b'\x8f\xc2u=': 3, b'\n\xd7\xa3=': 4, b'\xcd\xcc\xcc=': 5, b'\x8f\xc2\xf5=': 6, b')\\\x0f>': 7, b'\n\xd7#>': 8, b'\xecQ8>': 9, b'\xcd\xccL>': 10, b'\xaeGa>': 11, b'\x8f\xc2u>': 12, b'\xb8\x1e\x85>': 13, b')\\\x8f>': 14, b'\x9a\x99\x99>': 15, b'\n\xd7\xa3>': 16, b'{\x14\xae>': 17, b'\xecQ\xb8>': 18, b'\\\x8f\xc2>': 19, b'\xcd\xcc\xcc>': 20, b'=\n\xd7>': 21, b'\xaeG\xe1>': 22, b'\x1f\x85\xeb>': 23, b'\x8f\xc2\xf5>': 24, b'\x00\x00\x00?': 25, b'\xb8\x1e\x05?': 26, b'q=\n?': 27, b')\\\x0f?': 28, b'\xe1z\x14?': 29, b'\x9a\x99\x19?': 30, b'R\xb8\x1e?': 31, b'\n\xd7#?': 32, b'\xc3\xf5(?': 33, b'{\x14.?': 34, b'333?': 35, b'\xecQ8?': 36, b'\xa4p=?': 37, b'\\\x8fB?': 38, b'\x14\xaeG?': 39, b'\xcd\xccL?': 40, b'\x85\xebQ?': 41, b'=\nW?': 42, b'\xf6(\\?': 43, b'\xaeGa?': 44, b'fff?': 45, b'\x1f\x85k?': 46, b'\xd7\xa3p?': 47, b'\x8f\xc2u?': 48, b'H\xe1z?': 49, b'\x00\x00\x80?': 50}
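# A hedged sketch: spot checks suggest both tables are just the little-endian
# float32 encoding of balance/50.0 (e.g. -45 -> -0.9 -> b'fff\xbf'), so they
# could be generated with struct instead of being hard-coded.
import struct
balancewrite_generated = {b: struct.pack('<f', b / 50.0) for b in range(-50, 51)}
balanceread_generated = {v: k for k, v in balancewrite_generated.items()}
assert balancewrite_generated == balancewrite and balanceread_generated == balanceread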
| 2,134.428571
| 10,701
| 0.646744
| 3,318
| 14,941
| 2.906872
| 0.153406
| 0.011612
| 0.014515
| 0.009953
| 0.086366
| 0.011198
| 0
| 0
| 0
| 0
| 0
| 0.176037
| 0.028579
| 14,941
| 6
| 10,702
| 2,490.166667
| 0.488494
| 0.005689
| 0
| 0
| 0
| 2.666667
| 0.781505
| 0.629083
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7b32d617abfdad23415c813d050e32089b914c82
| 68,286
|
py
|
Python
|
pyboto3/transfer.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 91
|
2016-12-31T11:38:37.000Z
|
2021-09-16T19:33:23.000Z
|
pyboto3/transfer.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 7
|
2017-01-02T18:54:23.000Z
|
2020-08-11T13:54:02.000Z
|
pyboto3/transfer.py
|
gehad-shaat/pyboto3
|
4a0c2851a8bc04fb1c71c36086f7bb257e48181d
|
[
"MIT"
] | 26
|
2016-12-31T13:11:00.000Z
|
2022-03-03T21:01:12.000Z
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
"""
pass
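# A hedged usage sketch: this mirrors botocore's can_paginate/get_paginator
# pair as described in the docstring above; 'list_servers' is used purely as
# an illustrative operation name.
#   if client.can_paginate('list_servers'):
#       for page in client.get_paginator('list_servers').paginate():
#           print(page['Servers'])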
def create_server(Certificate=None, EndpointDetails=None, EndpointType=None, HostKey=None, IdentityProviderDetails=None, IdentityProviderType=None, LoggingRole=None, Protocols=None, Tags=None):
"""
Instantiates an autoscaling virtual server based on the selected file transfer protocol in AWS. When you make updates to your file transfer protocol-enabled server or when you work with users, use the service-generated ServerId property that is assigned to the newly created server.
See also: AWS API Documentation
Exceptions
:example: response = client.create_server(
Certificate='string',
EndpointDetails={
'AddressAllocationIds': [
'string',
],
'SubnetIds': [
'string',
],
'VpcEndpointId': 'string',
'VpcId': 'string'
},
EndpointType='PUBLIC'|'VPC'|'VPC_ENDPOINT',
HostKey='string',
IdentityProviderDetails={
'Url': 'string',
'InvocationRole': 'string'
},
IdentityProviderType='SERVICE_MANAGED'|'API_GATEWAY',
LoggingRole='string',
Protocols=[
'SFTP'|'FTP'|'FTPS',
],
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type Certificate: string
:param Certificate: The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. Required when Protocols is set to FTPS .
:type EndpointDetails: dict
:param EndpointDetails: The virtual private cloud (VPC) endpoint settings that are configured for your file transfer protocol-enabled server. When you host your endpoint within your VPC, you can make it accessible only to resources within your VPC, or you can attach Elastic IPs and make it accessible to clients over the internet. Your VPC\'s default security groups are automatically assigned to your endpoint.\n\nAddressAllocationIds (list) --A list of address allocation IDs that are required to attach an Elastic IP address to your file transfer protocol-enabled server\'s endpoint. This is only valid in the UpdateServer API.\n\nNote\nThis property can only be use when EndpointType is set to VPC .\n\n\n(string) --\n\n\nSubnetIds (list) --A list of subnet IDs that are required to host your file transfer protocol-enabled server endpoint in your VPC.\n\n(string) --\n\n\nVpcEndpointId (string) --The ID of the VPC endpoint.\n\nVpcId (string) --The VPC ID of the VPC in which a file transfer protocol-enabled server\'s endpoint will be hosted.\n\n\n
:type EndpointType: string
:param EndpointType: The type of VPC endpoint that you want your file transfer protocol-enabled server to connect to. You can choose to connect to the public internet or a virtual private cloud (VPC) endpoint. With a VPC endpoint, you can restrict access to your server and resources only within your VPC.
:type HostKey: string
:param HostKey: The RSA private key as generated by the ssh-keygen -N '' -f my-new-server-key command.\n\nWarning\nIf you aren\'t planning to migrate existing users from an existing SFTP-enabled server to a new server, don\'t update the host key. Accidentally changing a server\'s host key can be disruptive.\n\nFor more information, see Changing the Host Key for Your AWS Transfer Family Server in the AWS Transfer Family User Guide .\n
:type IdentityProviderDetails: dict
:param IdentityProviderDetails: Required when IdentityProviderType is set to API_GATEWAY . Accepts an array containing all of the information required to call a customer-supplied authentication API, including the API Gateway URL. Not required when IdentityProviderType is set to SERVICE_MANAGED .\n\nUrl (string) --Contains the location of the service endpoint used to authenticate users.\n\nInvocationRole (string) --Provides the type of InvocationRole used to authenticate the user account.\n\n\n
:type IdentityProviderType: string
:param IdentityProviderType: Specifies the mode of authentication for a file transfer protocol-enabled server. The default value is SERVICE_MANAGED , which allows you to store and access user credentials within the AWS Transfer Family service. Use the API_GATEWAY value to integrate with an identity provider of your choosing. The API_GATEWAY setting requires you to provide an API Gateway endpoint URL to call for authentication using the IdentityProviderDetails parameter.
:type LoggingRole: string
:param LoggingRole: Allows the service to write your users\' activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
:type Protocols: list
:param Protocols: Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server\'s endpoint. The available protocols are:\n\nSecure Shell (SSH) File Transfer Protocol (SFTP): File transfer over SSH\nFile Transfer Protocol Secure (FTPS): File transfer with TLS encryption\nFile Transfer Protocol (FTP): Unencrypted file transfer\n\n\n(string) --\n\n
:type Tags: list
:param Tags: Key-value pairs that can be used to group and search for file transfer protocol-enabled servers.\n\n(dict) --Creates a key-value pair for a specific resource. Tags are metadata that you can use to search for and group a resource for various purposes. You can apply tags to servers, users, and roles. A tag key can take more than one value. For example, to group servers for accounting purposes, you might create a tag called Group and assign the values Research and Accounting to that group.\n\nKey (string) -- [REQUIRED]The name assigned to the tag that you create.\n\nValue (string) -- [REQUIRED]Contains one or more values that you assigned to the key name you create.\n\n\n\n\n
:rtype: dict
ReturnsResponse Syntax
{
'ServerId': 'string'
}
Response Structure
(dict) --
ServerId (string) --
The service-assigned ID of the file transfer protocol-enabled server that is created.
Exceptions
Transfer.Client.exceptions.AccessDeniedException
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ThrottlingException
:return: {
'ServerId': 'string'
}
:returns:
Transfer.Client.exceptions.AccessDeniedException
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ThrottlingException
"""
pass
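# A hedged usage sketch: pyboto3 modules are autocomplete stubs, so a real
# call goes through boto3; the parameter values below are illustrative and
# follow the request syntax documented above.
import boto3

transfer = boto3.client('transfer')
response = transfer.create_server(
    EndpointType='PUBLIC',
    IdentityProviderType='SERVICE_MANAGED',
    Protocols=['SFTP'],
    Tags=[{'Key': 'Group', 'Value': 'Research'}],
)
print(response['ServerId'])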
def create_user(HomeDirectory=None, HomeDirectoryType=None, HomeDirectoryMappings=None, Policy=None, Role=None, ServerId=None, SshPublicKeyBody=None, Tags=None, UserName=None):
"""
Creates a user and associates them with an existing file transfer protocol-enabled server. You can only create and associate users with servers that have the IdentityProviderType set to SERVICE_MANAGED . Using parameters for CreateUser , you can specify the user name, set the home directory, store the user\'s public key, and assign the user\'s AWS Identity and Access Management (IAM) role. You can also optionally add a scope-down policy, and assign metadata with tags that can be used to group and search for users.
See also: AWS API Documentation
Exceptions
:example: response = client.create_user(
HomeDirectory='string',
HomeDirectoryType='PATH'|'LOGICAL',
HomeDirectoryMappings=[
{
'Entry': 'string',
'Target': 'string'
},
],
Policy='string',
Role='string',
ServerId='string',
SshPublicKeyBody='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
],
UserName='string'
)
:type HomeDirectory: string
:param HomeDirectory: The landing directory (folder) for a user when they log in to the file transfer protocol-enabled server using the client.\nAn example is your-Amazon-S3-bucket-name>/home/username .\n
:type HomeDirectoryType: string
:param HomeDirectoryType: The type of landing directory (folder) you want your users\' home directory to be when they log into the file transfer protocol-enabled server. If you set it to PATH , the user will see the absolute Amazon S3 bucket paths as is in their file transfer protocol clients. If you set it LOGICAL , you will need to provide mappings in the HomeDirectoryMappings for how you want to make Amazon S3 paths visible to your users.
:type HomeDirectoryMappings: list
:param HomeDirectoryMappings: Logical directory mappings that specify what Amazon S3 paths and keys should be visible to your user and how you want to make them visible. You will need to specify the 'Entry ' and 'Target ' pair, where Entry shows how the path is made visible and Target is the actual Amazon S3 path. If you only specify a target, it will be displayed as is. You will need to also make sure that your AWS IAM Role provides access to paths in Target . The following is an example.\n\n\'[ '/bucket2/documentation', { 'Entry': 'your-personal-report.pdf', 'Target': '/bucket3/customized-reports/${transfer:UserName}.pdf' } ]\'\nIn most cases, you can use this value instead of the scope-down policy to lock your user down to the designated home directory ('chroot'). To do this, you can set Entry to \'/\' and set Target to the HomeDirectory parameter value.\n\nNote\nIf the target of a logical directory entry does not exist in Amazon S3, the entry will be ignored. As a workaround, you can use the Amazon S3 api to create 0 byte objects as place holders for your directory. If using the CLI, use the s3api call instead of s3 so you can use the put-object operation. For example, you use the following: aws s3api put-object --bucket bucketname --key path/to/folder/ . Make sure that the end of the key name ends in a \'/\' for it to be considered a folder.\n\n\n(dict) --Represents an object that contains entries and a targets for HomeDirectoryMappings .\n\nEntry (string) -- [REQUIRED]Represents an entry and a target for HomeDirectoryMappings .\n\nTarget (string) -- [REQUIRED]Represents the map target that is used in a HomeDirectorymapEntry .\n\n\n\n\n
:type Policy: string
:param Policy: A scope-down policy for your user so you can use the same IAM role across multiple users. This policy scopes down user access to portions of their Amazon S3 bucket. Variables that you can use inside this policy include ${Transfer:UserName} , ${Transfer:HomeDirectory} , and ${Transfer:HomeBucket} .\n\nNote\nFor scope-down policies, AWS Transfer Family stores the policy as a JSON blob, instead of the Amazon Resource Name (ARN) of the policy. You save the policy as a JSON blob and pass it in the Policy argument.\nFor an example of a scope-down policy, see Creating a Scope-Down Policy .\nFor more information, see AssumeRole in the AWS Security Token Service API Reference .\n\n
:type Role: string
:param Role: [REQUIRED]\nThe IAM role that controls your users\' access to your Amazon S3 bucket. The policies attached to this role will determine the level of access you want to provide your users when transferring files into and out of your Amazon S3 bucket or buckets. The IAM role should also contain a trust relationship that allows the file transfer protocol-enabled server to access your resources when servicing your users\' transfer requests.\n
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server instance. This is the specific server that you added your user to.\n
:type SshPublicKeyBody: string
:param SshPublicKeyBody: The public portion of the Secure Shell (SSH) key used to authenticate the user to the file transfer protocol-enabled server.
:type Tags: list
:param Tags: Key-value pairs that can be used to group and search for users. Tags are metadata attached to users for any purpose.\n\n(dict) --Creates a key-value pair for a specific resource. Tags are metadata that you can use to search for and group a resource for various purposes. You can apply tags to servers, users, and roles. A tag key can take more than one value. For example, to group servers for accounting purposes, you might create a tag called Group and assign the values Research and Accounting to that group.\n\nKey (string) -- [REQUIRED]The name assigned to the tag that you create.\n\nValue (string) -- [REQUIRED]Contains one or more values that you assigned to the key name you create.\n\n\n\n\n
:type UserName: string
:param UserName: [REQUIRED]\nA unique string that identifies a user and is associated with a file transfer protocol-enabled server as specified by the ServerId . This user name must be a minimum of 3 and a maximum of 32 characters long. The following are valid characters: a-z, A-Z, 0-9, underscore, and hyphen. The user name can\'t start with a hyphen.\n
:rtype: dict
Returns
Response Syntax
{
'ServerId': 'string',
'UserName': 'string'
}
Response Structure
(dict) --
ServerId (string) --
The ID of the file transfer protocol-enabled server that the user is attached to.
UserName (string) --
A unique string that identifies a user account associated with a file transfer protocol-enabled server.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ResourceNotFoundException
:return: {
'ServerId': 'string',
'UserName': 'string'
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ResourceNotFoundException
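Usage sketch (illustrative only; the server ID, role ARN, and bucket path below are hypothetical). Creates a user locked to a logical home directory ('chroot') via a single root mapping:
    import boto3
    client = boto3.client('transfer')
    response = client.create_user(
        ServerId='s-0123456789abcdef0',
        UserName='jdoe',
        Role='arn:aws:iam::111122223333:role/transfer-access',
        HomeDirectoryType='LOGICAL',
        HomeDirectoryMappings=[
            {'Entry': '/', 'Target': '/my-bucket/home/jdoe'},
        ],
    )
    print(response['ServerId'], response['UserName'])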
"""
pass
def delete_server(ServerId=None):
"""
Deletes the file transfer protocol-enabled server that you specify.
No response returns from this operation.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_server(
ServerId='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA unique system-assigned identifier for a file transfer protocol-enabled server instance.\n
"""
pass
def delete_ssh_public_key(ServerId=None, SshPublicKeyId=None, UserName=None):
"""
Deletes a user\'s Secure Shell (SSH) public key.
No response is returned from this operation.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_ssh_public_key(
ServerId='string',
SshPublicKeyId='string',
UserName='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server instance that has the user assigned to it.\n
:type SshPublicKeyId: string
:param SshPublicKeyId: [REQUIRED]\nA unique identifier used to reference your user\'s specific SSH key.\n
:type UserName: string
:param UserName: [REQUIRED]\nA unique string that identifies a user whose public key is being deleted.\n
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
"""
pass
def delete_user(ServerId=None, UserName=None):
"""
Deletes the user belonging to a file transfer protocol-enabled server you specify.
No response returns from this operation.
See also: AWS API Documentation
Exceptions
:example: response = client.delete_user(
ServerId='string',
UserName='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server instance that has the user assigned to it.\n
:type UserName: string
:param UserName: [REQUIRED]\nA unique string that identifies a user that is being deleted from a file transfer protocol-enabled server.\n
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
"""
pass
def describe_server(ServerId=None):
"""
Describes a file transfer protocol-enabled server that you specify by passing the ServerId parameter.
The response contains a description of a server\'s properties. When you set EndpointType to VPC, the response will contain the EndpointDetails .
See also: AWS API Documentation
Exceptions
:example: response = client.describe_server(
ServerId='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server.\n
:rtype: dict
Returns
Response Syntax
{
'Server': {
'Arn': 'string',
'Certificate': 'string',
'EndpointDetails': {
'AddressAllocationIds': [
'string',
],
'SubnetIds': [
'string',
],
'VpcEndpointId': 'string',
'VpcId': 'string'
},
'EndpointType': 'PUBLIC'|'VPC'|'VPC_ENDPOINT',
'HostKeyFingerprint': 'string',
'IdentityProviderDetails': {
'Url': 'string',
'InvocationRole': 'string'
},
'IdentityProviderType': 'SERVICE_MANAGED'|'API_GATEWAY',
'LoggingRole': 'string',
'Protocols': [
'SFTP'|'FTP'|'FTPS',
],
'ServerId': 'string',
'State': 'OFFLINE'|'ONLINE'|'STARTING'|'STOPPING'|'START_FAILED'|'STOP_FAILED',
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'UserCount': 123
}
}
Response Structure
(dict) --
Server (dict) --An array containing the properties of a file transfer protocol-enabled server with the ServerID you specified.
Arn (string) --Specifies the unique Amazon Resource Name (ARN) for a file transfer protocol-enabled server to be described.
Certificate (string) --The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. Required when Protocols is set to FTPS .
EndpointDetails (dict) --The virtual private cloud (VPC) endpoint settings that you configured for your file transfer protocol-enabled server.
AddressAllocationIds (list) --A list of address allocation IDs that are required to attach an Elastic IP address to your file transfer protocol-enabled server\'s endpoint. This is only valid in the UpdateServer API.
Note
This property can only be used when EndpointType is set to VPC .
(string) --
SubnetIds (list) --A list of subnet IDs that are required to host your file transfer protocol-enabled server endpoint in your VPC.
(string) --
VpcEndpointId (string) --The ID of the VPC endpoint.
VpcId (string) --The VPC ID of the VPC in which a file transfer protocol-enabled server\'s endpoint will be hosted.
EndpointType (string) --The type of endpoint that your file transfer protocol-enabled server is connected to. If your server is connected to a VPC endpoint, your server isn\'t accessible over the public internet.
HostKeyFingerprint (string) --Contains the message-digest algorithm (MD5) hash of a file transfer protocol-enabled server\'s host key. This value is equivalent to the output of the ssh-keygen -l -E md5 -f my-new-server-key command.
IdentityProviderDetails (dict) --Specifies information to call a customer-supplied authentication API. This field is not populated when the IdentityProviderType of a file transfer protocol-enabled server is SERVICE_MANAGED .
Url (string) --Contains the location of the service endpoint used to authenticate users.
InvocationRole (string) --Provides the type of InvocationRole used to authenticate the user account.
IdentityProviderType (string) --Defines the mode of authentication method enabled for this service. A value of SERVICE_MANAGED means that you are using this file transfer protocol-enabled server to store and access user credentials within the service. A value of API_GATEWAY indicates that you have integrated an API Gateway endpoint that will be invoked for authenticating your user into the service.
LoggingRole (string) --An AWS Identity and Access Management (IAM) entity that allows a file transfer protocol-enabled server to turn on Amazon CloudWatch logging for Amazon S3 events. When set, user activity can be viewed in your CloudWatch logs.
Protocols (list) --Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server\'s endpoint. The available protocols are:
Secure Shell (SSH) File Transfer Protocol (SFTP): File transfer over SSH
File Transfer Protocol Secure (FTPS): File transfer with TLS encryption
File Transfer Protocol (FTP): Unencrypted file transfer
(string) --
ServerId (string) --Unique system-assigned identifier for a file transfer protocol-enabled server that you instantiate.
State (string) --The condition of a file transfer protocol-enabled server for the server that was described. A value of ONLINE indicates that the server can accept jobs and transfer files. A State value of OFFLINE means that the server cannot perform file transfer operations.
The states of STARTING and STOPPING indicate that the server is in an intermediate state, either not fully able to respond, or not fully offline. The values of START_FAILED or STOP_FAILED can indicate an error condition.
Tags (list) --Contains the key-value pairs that you can use to search for and group file transfer protocol-enabled servers that were assigned to the server that was described.
(dict) --Creates a key-value pair for a specific resource. Tags are metadata that you can use to search for and group a resource for various purposes. You can apply tags to servers, users, and roles. A tag key can take more than one value. For example, to group servers for accounting purposes, you might create a tag called Group and assign the values Research and Accounting to that group.
Key (string) --The name assigned to the tag that you create.
Value (string) --Contains one or more values that you assigned to the key name you create.
UserCount (integer) --The number of users that are assigned to a file transfer protocol-enabled server you specified with the ServerId .
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
:return: {
'Server': {
'Arn': 'string',
'Certificate': 'string',
'EndpointDetails': {
'AddressAllocationIds': [
'string',
],
'SubnetIds': [
'string',
],
'VpcEndpointId': 'string',
'VpcId': 'string'
},
'EndpointType': 'PUBLIC'|'VPC'|'VPC_ENDPOINT',
'HostKeyFingerprint': 'string',
'IdentityProviderDetails': {
'Url': 'string',
'InvocationRole': 'string'
},
'IdentityProviderType': 'SERVICE_MANAGED'|'API_GATEWAY',
'LoggingRole': 'string',
'Protocols': [
'SFTP'|'FTP'|'FTPS',
],
'ServerId': 'string',
'State': 'OFFLINE'|'ONLINE'|'STARTING'|'STOPPING'|'START_FAILED'|'STOP_FAILED',
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'UserCount': 123
}
}
:returns:
(string) --
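Usage sketch (illustrative only; the server ID is hypothetical). Reads back a server's state, endpoint type, and enabled protocols:
    import boto3
    client = boto3.client('transfer')
    server = client.describe_server(ServerId='s-0123456789abcdef0')['Server']
    print(server['State'], server['EndpointType'], server.get('Protocols'))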
"""
pass
def describe_user(ServerId=None, UserName=None):
"""
Describes the user assigned to the specific file transfer protocol-enabled server, as identified by its ServerId property.
The response from this call returns the properties of the user associated with the ServerId value that was specified.
See also: AWS API Documentation
Exceptions
:example: response = client.describe_user(
ServerId='string',
UserName='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server that has this user assigned.\n
:type UserName: string
:param UserName: [REQUIRED]\nThe name of the user assigned to one or more file transfer protocol-enabled servers. User names are part of the sign-in credentials to use the AWS Transfer Family service and perform file transfer tasks.\n
:rtype: dict
Returns
Response Syntax
{
'ServerId': 'string',
'User': {
'Arn': 'string',
'HomeDirectory': 'string',
'HomeDirectoryMappings': [
{
'Entry': 'string',
'Target': 'string'
},
],
'HomeDirectoryType': 'PATH'|'LOGICAL',
'Policy': 'string',
'Role': 'string',
'SshPublicKeys': [
{
'DateImported': datetime(2015, 1, 1),
'SshPublicKeyBody': 'string',
'SshPublicKeyId': 'string'
},
],
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'UserName': 'string'
}
}
Response Structure
(dict) --
ServerId (string) --
A system-assigned unique identifier for a file transfer protocol-enabled server that has this user assigned.
User (dict) --
An array containing the properties of the user account for the ServerID value that you specified.
Arn (string) --
Contains the unique Amazon Resource Name (ARN) for the user that was requested to be described.
HomeDirectory (string) --
Specifies the landing directory (or folder), which is the location that files are written to or read from in an Amazon S3 bucket for the described user. An example is ``/your-s3-bucket-name/home/username`` .
HomeDirectoryMappings (list) --
Logical directory mappings that you specified for what Amazon S3 paths and keys should be visible to your user and how you want to make them visible. You will need to specify the "Entry " and "Target " pair, where Entry shows how the path is made visible and Target is the actual Amazon S3 path. If you only specify a target, it will be displayed as is. You will also need to make sure that your AWS IAM Role provides access to paths in Target .
In most cases, you can use this value instead of the scope-down policy to lock your user down to the designated home directory ("chroot"). To do this, you can set Entry to \'/\' and set Target to the HomeDirectory parameter value.
(dict) --
Represents an object that contains entries and targets for HomeDirectoryMappings .
Entry (string) --
Represents an entry and a target for HomeDirectoryMappings .
Target (string) --
Represents the map target that is used in a HomeDirectoryMapEntry .
HomeDirectoryType (string) --
The type of landing directory (folder) you mapped for your users to see when they log into the file transfer protocol-enabled server. If you set it to PATH , the user will see the absolute Amazon S3 bucket paths as is in their file transfer protocol clients. If you set it to LOGICAL , you will need to provide mappings in HomeDirectoryMappings for how you want to make Amazon S3 paths visible to your users.
Policy (string) --
Specifies the name of the policy in use for the described user.
Role (string) --
Specifies the IAM role that controls your users\' access to your Amazon S3 bucket. The policies attached to this role will determine the level of access you want to provide your users when transferring files into and out of your Amazon S3 bucket or buckets. The IAM role should also contain a trust relationship that allows a file transfer protocol-enabled server to access your resources when servicing your users\' transfer requests.
SshPublicKeys (list) --
Contains the public key portion of the Secure Shell (SSH) keys stored for the described user.
(dict) --
Provides information about the public Secure Shell (SSH) key that is associated with a user account for the specific file transfer protocol-enabled server (as identified by ServerId ). The information returned includes the date the key was imported, the public key contents, and the public key ID. A user can store more than one SSH public key associated with their user name on a specific server.
DateImported (datetime) --
The date that the public key was added to the user account.
SshPublicKeyBody (string) --
The content of the SSH public key as specified by the PublicKeyId .
SshPublicKeyId (string) --
The SshPublicKeyId parameter contains the identifier of the public key.
Tags (list) --
Contains the key-value pairs for the user requested. Tags can be used to search for and group users for a variety of purposes.
(dict) --
Creates a key-value pair for a specific resource. Tags are metadata that you can use to search for and group a resource for various purposes. You can apply tags to servers, users, and roles. A tag key can take more than one value. For example, to group servers for accounting purposes, you might create a tag called Group and assign the values Research and Accounting to that group.
Key (string) --
The name assigned to the tag that you create.
Value (string) --
Contains one or more values that you assigned to the key name you create.
UserName (string) --
The name of the user that was requested to be described. User names are used for authentication purposes. This is the string that will be used by your user when they log in to your file transfer protocol-enabled server.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
:return: {
'ServerId': 'string',
'User': {
'Arn': 'string',
'HomeDirectory': 'string',
'HomeDirectoryMappings': [
{
'Entry': 'string',
'Target': 'string'
},
],
'HomeDirectoryType': 'PATH'|'LOGICAL',
'Policy': 'string',
'Role': 'string',
'SshPublicKeys': [
{
'DateImported': datetime(2015, 1, 1),
'SshPublicKeyBody': 'string',
'SshPublicKeyId': 'string'
},
],
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'UserName': 'string'
}
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
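Usage sketch (illustrative only; IDs are hypothetical). Lists the SSH public keys stored for a user:
    import boto3
    client = boto3.client('transfer')
    user = client.describe_user(ServerId='s-0123456789abcdef0', UserName='jdoe')['User']
    for key in user.get('SshPublicKeys', []):
        print(key['SshPublicKeyId'], key['DateImported'])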
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to\nClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid\nfor. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By\ndefault, the http method is whatever is used in the method\'s model.
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
Returns
A paginator object.
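Usage sketch (illustrative only), assuming the ListServers operation is pageable for this client. A paginator handles NextToken for you:
    import boto3
    client = boto3.client('transfer')
    paginator = client.get_paginator('list_servers')
    for page in paginator.paginate():
        for server in page['Servers']:
            print(server['ServerId'], server['State'])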
"""
pass
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters\nsection of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
"""
pass
def import_ssh_public_key(ServerId=None, SshPublicKeyBody=None, UserName=None):
"""
Adds a Secure Shell (SSH) public key to a user account identified by a UserName value assigned to the specific file transfer protocol-enabled server, identified by ServerId .
The response returns the UserName value, the ServerId value, and the name of the SshPublicKeyId .
See also: AWS API Documentation
Exceptions
:example: response = client.import_ssh_public_key(
ServerId='string',
SshPublicKeyBody='string',
UserName='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server.\n
:type SshPublicKeyBody: string
:param SshPublicKeyBody: [REQUIRED]\nThe public key portion of an SSH key pair.\n
:type UserName: string
:param UserName: [REQUIRED]\nThe name of the user account that is assigned to one or more file transfer protocol-enabled servers.\n
:rtype: dict
Returns
Response Syntax
{
'ServerId': 'string',
'SshPublicKeyId': 'string',
'UserName': 'string'
}
Response Structure
(dict) --
Identifies the user, the file transfer protocol-enabled server they belong to, and the identifier of the SSH public key associated with that user. A user can have more than one key on each server that they are associated with.
ServerId (string) --
A system-assigned unique identifier for a file transfer protocol-enabled server.
SshPublicKeyId (string) --
The name given to a public key by the system that was imported.
UserName (string) --
A user name assigned to the ServerID value that you specified.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
:return: {
'ServerId': 'string',
'SshPublicKeyId': 'string',
'UserName': 'string'
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
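Usage sketch (illustrative only; the key path, server ID, and user name are hypothetical). Imports a public key read from disk:
    import boto3
    client = boto3.client('transfer')
    with open('/home/jdoe/.ssh/id_rsa.pub') as f:
        body = f.read()
    response = client.import_ssh_public_key(
        ServerId='s-0123456789abcdef0', SshPublicKeyBody=body, UserName='jdoe')
    print(response['SshPublicKeyId'])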
"""
pass
def list_servers(MaxResults=None, NextToken=None):
"""
Lists the file transfer protocol-enabled servers that are associated with your AWS account.
See also: AWS API Documentation
Exceptions
:example: response = client.list_servers(
MaxResults=123,
NextToken='string'
)
:type MaxResults: integer
:param MaxResults: Specifies the number of file transfer protocol-enabled servers to return as a response to the ListServers query.
:type NextToken: string
:param NextToken: When additional results are obtained from the ``ListServers`` command, a NextToken parameter is returned in the output. You can then pass the NextToken parameter in a subsequent command to continue listing additional file transfer protocol-enabled servers.
:rtype: dict
Returns
Response Syntax
{
'NextToken': 'string',
'Servers': [
{
'Arn': 'string',
'IdentityProviderType': 'SERVICE_MANAGED'|'API_GATEWAY',
'EndpointType': 'PUBLIC'|'VPC'|'VPC_ENDPOINT',
'LoggingRole': 'string',
'ServerId': 'string',
'State': 'OFFLINE'|'ONLINE'|'STARTING'|'STOPPING'|'START_FAILED'|'STOP_FAILED',
'UserCount': 123
},
]
}
Response Structure
(dict) --
NextToken (string) --
When you can get additional results from the ListServers operation, a NextToken parameter is returned in the output. In a following command, you can pass in the NextToken parameter to continue listing additional file transfer protocol-enabled servers.
Servers (list) --
An array of file transfer protocol-enabled servers that were listed.
(dict) --
Returns properties of a file transfer protocol-enabled server that was specified.
Arn (string) --
The unique Amazon Resource Name (ARN) for a file transfer protocol-enabled server to be listed.
IdentityProviderType (string) --
The authentication method used to validate a user for a file transfer protocol-enabled server that was specified. This can include Secure Shell (SSH), user name and password combinations, or your own custom authentication method. Valid values include SERVICE_MANAGED or API_GATEWAY .
EndpointType (string) --
The type of VPC endpoint that your file transfer protocol-enabled server is connected to. If your server is connected to a VPC endpoint, your server isn\'t accessible over the public internet.
LoggingRole (string) --
The AWS Identity and Access Management (IAM) entity that allows a file transfer protocol-enabled server to turn on Amazon CloudWatch logging.
ServerId (string) --
The unique system-assigned identifier for a file transfer protocol-enabled server that was listed.
State (string) --
Describes the condition of a file transfer protocol-enabled server for the server that was described. A value of ONLINE indicates that the server can accept jobs and transfer files. A State value of OFFLINE means that the server cannot perform file transfer operations.
The states of STARTING and STOPPING indicate that the server is in an intermediate state, either not fully able to respond, or not fully offline. The values of START_FAILED or STOP_FAILED can indicate an error condition.
UserCount (integer) --
A numeric value that indicates the number of users that are assigned to a file transfer protocol-enabled server you specified with the ServerId .
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidNextTokenException
Transfer.Client.exceptions.InvalidRequestException
:return: {
'NextToken': 'string',
'Servers': [
{
'Arn': 'string',
'IdentityProviderType': 'SERVICE_MANAGED'|'API_GATEWAY',
'EndpointType': 'PUBLIC'|'VPC'|'VPC_ENDPOINT',
'LoggingRole': 'string',
'ServerId': 'string',
'State': 'OFFLINE'|'ONLINE'|'STARTING'|'STOPPING'|'START_FAILED'|'STOP_FAILED',
'UserCount': 123
},
]
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidNextTokenException
Transfer.Client.exceptions.InvalidRequestException
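Usage sketch (illustrative only). A manual NextToken loop, equivalent to what get_paginator does for you:
    import boto3
    client = boto3.client('transfer')
    token = None
    while True:
        kwargs = {'MaxResults': 50}
        if token:
            kwargs['NextToken'] = token
        page = client.list_servers(**kwargs)
        for server in page['Servers']:
            print(server['ServerId'], server['EndpointType'])
        token = page.get('NextToken')
        if not token:
            break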
"""
pass
def list_tags_for_resource(Arn=None, MaxResults=None, NextToken=None):
"""
Lists all of the tags associated with the Amazon Resource Number (ARN) you specify. The resource can be a user, server, or role.
See also: AWS API Documentation
Exceptions
:example: response = client.list_tags_for_resource(
Arn='string',
MaxResults=123,
NextToken='string'
)
:type Arn: string
:param Arn: [REQUIRED]\nRequests the tags associated with a particular Amazon Resource Name (ARN). An ARN is an identifier for a specific AWS resource, such as a server, user, or role.\n
:type MaxResults: integer
:param MaxResults: Specifies the number of tags to return as a response to the ListTagsForResource request.
:type NextToken: string
:param NextToken: When you request additional results from the ListTagsForResource operation, a NextToken parameter is returned in the output. You can then pass the NextToken parameter in a subsequent command to continue listing additional tags.
:rtype: dict
Returns
Response Syntax
{
'Arn': 'string',
'NextToken': 'string',
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
]
}
Response Structure
(dict) --
Arn (string) --
The ARN you specified to list the tags of.
NextToken (string) --
When you can get additional results from the ListTagsForResource call, a NextToken parameter is returned in the output. You can then pass the NextToken parameter in a subsequent command to continue listing additional tags.
Tags (list) --
Key-value pairs that are assigned to a resource, usually for the purpose of grouping and searching for items. Tags are metadata that you define.
(dict) --
Creates a key-value pair for a specific resource. Tags are metadata that you can use to search for and group a resource for various purposes. You can apply tags to servers, users, and roles. A tag key can take more than one value. For example, to group servers for accounting purposes, you might create a tag called Group and assign the values Research and Accounting to that group.
Key (string) --
The name assigned to the tag that you create.
Value (string) --
Contains one or more values that you assigned to the key name you create.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidNextTokenException
Transfer.Client.exceptions.InvalidRequestException
:return: {
'Arn': 'string',
'NextToken': 'string',
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
]
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidNextTokenException
Transfer.Client.exceptions.InvalidRequestException
"""
pass
def list_users(MaxResults=None, NextToken=None, ServerId=None):
"""
Lists the users for a file transfer protocol-enabled server that you specify by passing the ServerId parameter.
See also: AWS API Documentation
Exceptions
:example: response = client.list_users(
MaxResults=123,
NextToken='string',
ServerId='string'
)
:type MaxResults: integer
:param MaxResults: Specifies the number of users to return as a response to the ListUsers request.
:type NextToken: string
:param NextToken: When you can get additional results from the ListUsers call, a NextToken parameter is returned in the output. You can then pass the NextToken parameter in a subsequent command to continue listing additional users.
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server that has users assigned to it.\n
:rtype: dict
Returns
Response Syntax
{
'NextToken': 'string',
'ServerId': 'string',
'Users': [
{
'Arn': 'string',
'HomeDirectory': 'string',
'HomeDirectoryType': 'PATH'|'LOGICAL',
'Role': 'string',
'SshPublicKeyCount': 123,
'UserName': 'string'
},
]
}
Response Structure
(dict) --
NextToken (string) --
When you can get additional results from the ListUsers call, a NextToken parameter is returned in the output. You can then pass the NextToken parameter in a subsequent command to continue listing additional users.
ServerId (string) --
A system-assigned unique identifier for a file transfer protocol-enabled server that the users are assigned to.
Users (list) --
Returns the user accounts and their properties for the ServerId value that you specify.
(dict) --
Returns properties of the user that you specify.
Arn (string) --
The unique Amazon Resource Name (ARN) for the user that you want to learn about.
HomeDirectory (string) --
Specifies the location that files are written to or read from an Amazon S3 bucket for the user you specify by their ARN.
HomeDirectoryType (string) --
The type of landing directory (folder) you mapped for your users\' home directory. If you set it to PATH , the user will see the absolute Amazon S3 bucket paths as is in their file transfer protocol clients. If you set it to LOGICAL , you will need to provide mappings in HomeDirectoryMappings for how you want to make Amazon S3 paths visible to your users.
Role (string) --
The role in use by this user. A role is an AWS Identity and Access Management (IAM) entity that, in this case, allows a file transfer protocol-enabled server to act on a user\'s behalf. It allows the server to inherit the trust relationship that enables that user to perform file operations to their Amazon S3 bucket.
SshPublicKeyCount (integer) --
The number of SSH public keys stored for the user you specified.
UserName (string) --
The name of the user whose ARN was specified. User names are used for authentication purposes.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidNextTokenException
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
:return: {
'NextToken': 'string',
'ServerId': 'string',
'Users': [
{
'Arn': 'string',
'HomeDirectory': 'string',
'HomeDirectoryType': 'PATH'|'LOGICAL',
'Role': 'string',
'SshPublicKeyCount': 123,
'UserName': 'string'
},
]
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidNextTokenException
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
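Usage sketch (illustrative only; the server ID is hypothetical). Sums the SSH key count across the first page of users:
    import boto3
    client = boto3.client('transfer')
    users = client.list_users(ServerId='s-0123456789abcdef0')['Users']
    print(len(users), 'users,', sum(u['SshPublicKeyCount'] for u in users), 'keys')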
"""
pass
def start_server(ServerId=None):
"""
Changes the state of a file transfer protocol-enabled server from OFFLINE to ONLINE . It has no impact on a server that is already ONLINE . An ONLINE server can accept and process file transfer jobs.
The state of STARTING indicates that the server is in an intermediate state, either not fully able to respond, or not fully online. A value of START_FAILED can indicate an error condition.
No response is returned from this call.
See also: AWS API Documentation
Exceptions
:example: response = client.start_server(
ServerId='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server that you start.\n
"""
pass
def stop_server(ServerId=None):
"""
Changes the state of a file transfer protocol-enabled server from ONLINE to OFFLINE . An OFFLINE server cannot accept and process file transfer jobs. Information tied to your server, such as server and user properties, is not affected by stopping your server. Stopping the server will not reduce or impact your file transfer protocol endpoint billing.
The state of STOPPING indicates that the server is in an intermediate state, either not fully able to respond, or not fully offline. A value of STOP_FAILED can indicate an error condition.
No response is returned from this call.
See also: AWS API Documentation
Exceptions
:example: response = client.stop_server(
ServerId='string'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server that you stopped.\n
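Usage sketch (illustrative only; the server ID is hypothetical). Because this call returns no response, poll describe_server until the state settles:
    import time
    import boto3
    client = boto3.client('transfer')
    client.stop_server(ServerId='s-0123456789abcdef0')
    while True:
        state = client.describe_server(ServerId='s-0123456789abcdef0')['Server']['State']
        if state in ('OFFLINE', 'STOP_FAILED'):
            break
        time.sleep(5)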
"""
pass
def tag_resource(Arn=None, Tags=None):
"""
Attaches a key-value pair to a resource, as identified by its Amazon Resource Name (ARN). Resources are users, servers, roles, and other entities.
There is no response returned from this call.
See also: AWS API Documentation
Exceptions
:example: response = client.tag_resource(
Arn='string',
Tags=[
{
'Key': 'string',
'Value': 'string'
},
]
)
:type Arn: string
:param Arn: [REQUIRED]\nAn Amazon Resource Name (ARN) for a specific AWS resource, such as a server, user, or role.\n
:type Tags: list
:param Tags: [REQUIRED]\nKey-value pairs assigned to ARNs that you can use to group and search for resources by type. You can attach this metadata to user accounts for any purpose.\n\n(dict) --Creates a key-value pair for a specific resource. Tags are metadata that you can use to search for and group a resource for various purposes. You can apply tags to servers, users, and roles. A tag key can take more than one value. For example, to group servers for accounting purposes, you might create a tag called Group and assign the values Research and Accounting to that group.\n\nKey (string) -- [REQUIRED]The name assigned to the tag that you create.\n\nValue (string) -- [REQUIRED]Contains one or more values that you assigned to the key name you create.\n\n\n\n\n
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
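Usage sketch (illustrative only; the ARN and tag values are hypothetical). Tags a server by its ARN:
    import boto3
    client = boto3.client('transfer')
    client.tag_resource(
        Arn='arn:aws:transfer:us-east-1:111122223333:server/s-0123456789abcdef0',
        Tags=[{'Key': 'Group', 'Value': 'Research'}],
    )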
"""
pass
def test_identity_provider(ServerId=None, UserName=None, UserPassword=None, ServerProtocol=None):
"""
If the IdentityProviderType of a file transfer protocol-enabled server is API_GATEWAY , tests whether your API Gateway is set up successfully. We highly recommend that you call this operation to test your authentication method as soon as you create your server. By doing so, you can troubleshoot issues with the API Gateway integration to ensure that your users can successfully use the service.
See also: AWS API Documentation
Exceptions
:example: response = client.test_identity_provider(
ServerId='string',
UserName='string',
UserPassword='string',
ServerProtocol='SFTP'|'FTP'|'FTPS'
)
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned identifier for a specific file transfer protocol-enabled server. That server\'s user authentication method is tested with a user name and password.\n
:type UserName: string
:param UserName: [REQUIRED]\nThe name of the user account to be tested.\n
:type UserPassword: string
:param UserPassword: The password of the user account to be tested.
:type ServerProtocol: string
:param ServerProtocol: The type of file transfer protocol to be tested.\nThe available protocols are:\n\nSecure Shell (SSH) File Transfer Protocol (SFTP)\nFile Transfer Protocol Secure (FTPS)\nFile Transfer Protocol (FTP)\n\n
:rtype: dict
Returns
Response Syntax
{
'Response': 'string',
'StatusCode': 123,
'Message': 'string',
'Url': 'string'
}
Response Structure
(dict) --
Response (string) --
The response that is returned from your API Gateway.
StatusCode (integer) --
The HTTP status code that is the response from your API Gateway.
Message (string) --
A message that indicates whether the test was successful or not.
Url (string) --
The endpoint of the service used to authenticate a user.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
:return: {
'Response': 'string',
'StatusCode': 123,
'Message': 'string',
'Url': 'string'
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
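Usage sketch (illustrative only; the server ID and credentials are hypothetical). Exercises a custom identity provider and inspects the HTTP status it returned:
    import boto3
    client = boto3.client('transfer')
    response = client.test_identity_provider(
        ServerId='s-0123456789abcdef0',
        UserName='jdoe',
        UserPassword='example-password',
        ServerProtocol='SFTP',
    )
    print(response['StatusCode'], response['Message'])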
"""
pass
def untag_resource(Arn=None, TagKeys=None):
"""
Detaches a key-value pair from a resource, as identified by its Amazon Resource Name (ARN). Resources are users, servers, roles, and other entities.
No response is returned from this call.
See also: AWS API Documentation
Exceptions
:example: response = client.untag_resource(
Arn='string',
TagKeys=[
'string',
]
)
:type Arn: string
:param Arn: [REQUIRED]\nThe value of the resource that will have the tag removed. An Amazon Resource Name (ARN) is an identifier for a specific AWS resource, such as a server, user, or role.\n
:type TagKeys: list
:param TagKeys: [REQUIRED]\nTagKeys are key-value pairs assigned to ARNs that can be used to group and search for resources by type. This metadata can be attached to resources for any purpose.\n\n(string) --\n\n
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
"""
pass
def update_server(Certificate=None, EndpointDetails=None, EndpointType=None, HostKey=None, IdentityProviderDetails=None, LoggingRole=None, Protocols=None, ServerId=None):
"""
Updates the file transfer protocol-enabled server\'s properties after that server has been created.
The UpdateServer call returns the ServerId of the server you updated.
See also: AWS API Documentation
Exceptions
:example: response = client.update_server(
Certificate='string',
EndpointDetails={
'AddressAllocationIds': [
'string',
],
'SubnetIds': [
'string',
],
'VpcEndpointId': 'string',
'VpcId': 'string'
},
EndpointType='PUBLIC'|'VPC'|'VPC_ENDPOINT',
HostKey='string',
IdentityProviderDetails={
'Url': 'string',
'InvocationRole': 'string'
},
LoggingRole='string',
Protocols=[
'SFTP'|'FTP'|'FTPS',
],
ServerId='string'
)
:type Certificate: string
:param Certificate: The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. Required when Protocols is set to FTPS .
:type EndpointDetails: dict
:param EndpointDetails: The virtual private cloud (VPC) endpoint settings that are configured for your file transfer protocol-enabled server. With a VPC endpoint, you can restrict access to your server to resources only within your VPC. To control incoming internet traffic, you will need to associate one or more Elastic IP addresses with your server\'s endpoint.\n\nAddressAllocationIds (list) --A list of address allocation IDs that are required to attach an Elastic IP address to your file transfer protocol-enabled server\'s endpoint. This is only valid in the UpdateServer API.\n\nNote\nThis property can only be used when EndpointType is set to VPC .\n\n\n(string) --\n\n\nSubnetIds (list) --A list of subnet IDs that are required to host your file transfer protocol-enabled server endpoint in your VPC.\n\n(string) --\n\n\nVpcEndpointId (string) --The ID of the VPC endpoint.\n\nVpcId (string) --The VPC ID of the VPC in which a file transfer protocol-enabled server\'s endpoint will be hosted.\n\n\n
:type EndpointType: string
:param EndpointType: The type of endpoint that you want your file transfer protocol-enabled server to connect to. You can choose to connect to the public internet or a VPC endpoint. With a VPC endpoint, your server isn\'t accessible over the public internet.
:type HostKey: string
:param HostKey: The RSA private key as generated by ssh-keygen -N '' -f my-new-server-key .\n\nWarning\nIf you aren\'t planning to migrate existing users from an existing file transfer protocol-enabled server to a new server, don\'t update the host key. Accidentally changing a server\'s host key can be disruptive.\n\nFor more information, see Changing the Host Key for Your AWS Transfer Family Server in the AWS Transfer Family User Guide .\n
:type IdentityProviderDetails: dict
:param IdentityProviderDetails: An array containing all of the information required to call a customer\'s authentication API method.\n\nUrl (string) --Contains the location of the service endpoint used to authenticate users.\n\nInvocationRole (string) --Provides the type of InvocationRole used to authenticate the user account.\n\n\n
:type LoggingRole: string
:param LoggingRole: Changes the AWS Identity and Access Management (IAM) role that allows Amazon S3 events to be logged in Amazon CloudWatch, turning logging on or off.
:type Protocols: list
:param Protocols: Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server\'s endpoint. The available protocols are:\n\nSecure Shell (SSH) File Transfer Protocol (SFTP): File transfer over SSH\nFile Transfer Protocol Secure (FTPS): File transfer with TLS encryption\nFile Transfer Protocol (FTP): Unencrypted file transfer\n\n\n(string) --\n\n
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server instance that the user account is assigned to.\n
:rtype: dict
Returns
Response Syntax
{
'ServerId': 'string'
}
Response Structure
(dict) --
ServerId (string) --
A system-assigned unique identifier for a file transfer protocol-enabled server that the user account is assigned to.
Exceptions
Transfer.Client.exceptions.AccessDeniedException
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.ConflictException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
:return: {
'ServerId': 'string'
}
:returns:
Transfer.Client.exceptions.AccessDeniedException
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.ConflictException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceExistsException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
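Usage sketch (illustrative only; the server ID and certificate ARN are hypothetical). Enables FTPS alongside SFTP, which requires an ACM certificate:
    import boto3
    client = boto3.client('transfer')
    response = client.update_server(
        ServerId='s-0123456789abcdef0',
        Protocols=['SFTP', 'FTPS'],
        Certificate='arn:aws:acm:us-east-1:111122223333:certificate/abcd1234',
    )
    print(response['ServerId'])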
"""
pass
def update_user(HomeDirectory=None, HomeDirectoryType=None, HomeDirectoryMappings=None, Policy=None, Role=None, ServerId=None, UserName=None):
"""
Assigns new properties to a user. Parameters you pass modify any or all of the following: the home directory, role, and policy for the UserName and ServerId you specify.
The response returns the ServerId and the UserName for the updated user.
See also: AWS API Documentation
Exceptions
:example: response = client.update_user(
HomeDirectory='string',
HomeDirectoryType='PATH'|'LOGICAL',
HomeDirectoryMappings=[
{
'Entry': 'string',
'Target': 'string'
},
],
Policy='string',
Role='string',
ServerId='string',
UserName='string'
)
:type HomeDirectory: string
:param HomeDirectory: Specifies the landing directory (folder) for a user when they log in to the file transfer protocol-enabled server using their file transfer protocol client.\nAn example is /your-Amazon-S3-bucket-name/home/username .\n
:type HomeDirectoryType: string
:param HomeDirectoryType: The type of landing directory (folder) you want your users\' home directory to be when they log into the file transfer protocol-enabled server. If you set it to PATH , the user will see the absolute Amazon S3 bucket paths as is in their file transfer protocol clients. If you set it to LOGICAL , you will need to provide mappings in HomeDirectoryMappings for how you want to make Amazon S3 paths visible to your users.
:type HomeDirectoryMappings: list
:param HomeDirectoryMappings: Logical directory mappings that specify what Amazon S3 paths and keys should be visible to your user and how you want to make them visible. You will need to specify the 'Entry ' and 'Target ' pair, where Entry shows how the path is made visible and Target is the actual Amazon S3 path. If you only specify a target, it will be displayed as is. You will also need to make sure that your AWS IAM Role provides access to paths in Target . The following is an example.\n\n\'[ { 'Entry': '/your-personal-report.pdf', 'Target': '/bucket3/customized-reports/${transfer:UserName}.pdf' } ]\'\nIn most cases, you can use this value instead of the scope-down policy to lock your user down to the designated home directory ('chroot'). To do this, you can set Entry to \'/\' and set Target to the HomeDirectory parameter value.\n\nNote\nIf the target of a logical directory entry does not exist in Amazon S3, the entry will be ignored. As a workaround, you can use the Amazon S3 API to create 0-byte objects as placeholders for your directory. If using the CLI, use the s3api call instead of s3 so you can use the put-object operation. For example, you can use the following: aws s3api put-object --bucket bucketname --key path/to/folder/ . Make sure that the key name ends in a / for it to be considered a folder.\n\n\n(dict) --Represents an object that contains entries and targets for HomeDirectoryMappings .\n\nEntry (string) -- [REQUIRED]Represents an entry and a target for HomeDirectoryMappings .\n\nTarget (string) -- [REQUIRED]Represents the map target that is used in a HomeDirectoryMapEntry .\n\n\n\n\n
:type Policy: string
:param Policy: Allows you to supply a scope-down policy for your user so you can use the same AWS Identity and Access Management (IAM) role across multiple users. The policy scopes down user access to portions of your Amazon S3 bucket. Variables you can use inside this policy include ${Transfer:UserName} , ${Transfer:HomeDirectory} , and ${Transfer:HomeBucket} .\n\nNote\nFor scope-down policies, AWS Transfer Family stores the policy as a JSON blob, instead of the Amazon Resource Name (ARN) of the policy. You save the policy as a JSON blob and pass it in the Policy argument.\nFor an example of a scope-down policy, see Creating a Scope-Down Policy .\nFor more information, see AssumeRole in the AWS Security Token Service API Reference .\n\n
:type Role: string
:param Role: The IAM role that controls your users\' access to your Amazon S3 bucket. The policies attached to this role will determine the level of access you want to provide your users when transferring files into and out of your Amazon S3 bucket or buckets. The IAM role should also contain a trust relationship that allows the file transfer protocol-enabled server to access your resources when servicing your users\' transfer requests.
:type ServerId: string
:param ServerId: [REQUIRED]\nA system-assigned unique identifier for a file transfer protocol-enabled server instance that the user account is assigned to.\n
:type UserName: string
:param UserName: [REQUIRED]\nA unique string that identifies a user and is associated with a file transfer protocol-enabled server as specified by the ServerId . This is the string that will be used by your user when they log in to your server. This user name is a minimum of 3 and a maximum of 32 characters long. The following are valid characters: a-z, A-Z, 0-9, underscore, and hyphen. The user name can\'t start with a hyphen.\n
:rtype: dict
Returns
Response Syntax
{
'ServerId': 'string',
'UserName': 'string'
}
Response Structure
(dict) --
UpdateUserResponse returns the user name and file transfer protocol-enabled server identifier for the request to update a user\'s properties.
ServerId (string) --
A system-assigned unique identifier for a file transfer protocol-enabled server instance that the user account is assigned to.
UserName (string) --
The unique identifier for a user that is assigned to a file transfer protocol-enabled server instance that was specified in the request.
Exceptions
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
:return: {
'ServerId': 'string',
'UserName': 'string'
}
:returns:
Transfer.Client.exceptions.ServiceUnavailableException
Transfer.Client.exceptions.InternalServiceError
Transfer.Client.exceptions.InvalidRequestException
Transfer.Client.exceptions.ResourceNotFoundException
Transfer.Client.exceptions.ThrottlingException
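Usage sketch (illustrative only; names are hypothetical). Re-points a user's chroot-style mapping at a new bucket prefix:
    import boto3
    client = boto3.client('transfer')
    response = client.update_user(
        ServerId='s-0123456789abcdef0',
        UserName='jdoe',
        HomeDirectoryType='LOGICAL',
        HomeDirectoryMappings=[{'Entry': '/', 'Target': '/my-new-bucket/home/jdoe'}],
    )
    print(response['ServerId'], response['UserName'])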
"""
pass
# === test/fixtures.py (willvanwazer/congress, CC0-1.0); dataset record metrics elided ===
# Test fixture helpers: load a cached bill HTML page and parse it with bill_info.
import bill_info
def open_bill(bill_id):
return open("test/fixtures/bills/%s/information.html" % bill_id).read()
def bill(bill_id):
return bill_info.parse_bill(bill_id, open_bill(bill_id), {})
# === server/users/tests.py (ignatowski/mastermind, MIT); dataset record metrics elided ===
from .models import User
from rest_framework.test import APITestCase
from django.urls import reverse
from rest_framework import status
class UsersTest(APITestCase):
"""Class to test user related services."""
def setUp(self) -> None:
"""Initial set up for testing user services."""
self.testuser = User.objects.create_user('testuser', 'testuser@test.com', 'password1234')
self.user_register_url = reverse('user-register')
self.user_login_url = reverse('user-login')
def test_user_register_success(self) -> None:
"""Test that a user is able to be successfully created."""
# valid user data
data = {
'username': 'testuser1',
'email': 'testuser1@test.com',
'password': 'password1234'
}
response = self.client.post(self.user_register_url, data, format='json')
self.assertEqual(User.objects.count(), 2)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['username'], data['username'])
self.assertEqual(response.data['email'], data['email'])
self.assertTrue('token' in response.data)
self.assertTrue('id' in response.data)
self.assertFalse('password' in response.data)
def test_user_register_fail_username_empty(self) -> None:
"""Test that user registration fails based on username being empty."""
# username empty
data = {
'username': '',
'email': 'testuser2@test.com',
'password': 'password1234'
}
response = self.client.post(self.user_register_url, data, format='json')
self.assertEqual(User.objects.count(), 1)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_user_register_fail_username_short(self) -> None:
"""Test that user registration fails based on username being too short."""
# username too short
data = {
'username': 'tes',
'email': 'testuser2@test.com',
'password': 'password1234'
}
response = self.client.post(self.user_register_url, data, format='json')
self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_username_long(self) -> None:
        """Test that user registration fails when the username is too long."""
        # username too long
        data = {
            'username': 'test12345678901234567890123456789',
            'email': 'testuser2@test.com',
            'password': 'password1234'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_username_duplicate(self) -> None:
        """Test that user registration fails when the username is a duplicate."""
        # username duplicate
        data = {
            'username': 'testuser',
            'email': 'testuser2@test.com',
            'password': 'password1234'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_email_empty(self) -> None:
        """Test that user registration fails when the email is empty."""
        # email empty
        data = {
            'username': 'testuser2',
            'email': '',
            'password': 'password1234'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_email_long(self) -> None:
        """Test that user registration fails when the email is too long."""
        # email too long
        data = {
            'username': 'testuser2',
            'email': 'testuser2123456789012345@test.com',
            'password': 'password1234'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_email_duplicate(self) -> None:
        """Test that user registration fails when the email is a duplicate."""
        # email duplicate
        data = {
            'username': 'testuser2',
            'email': 'testuser@test.com',
            'password': 'password1234'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_email_invalid(self) -> None:
        """Test that user registration fails when the email is invalid."""
        # email invalid
        data = {
            'username': 'testuser2',
            'email': 'testuser2',
            'password': 'password1234'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_password_empty(self) -> None:
        """Test that user registration fails when the password is empty."""
        # password empty
        data = {
            'username': 'testuser2',
            'email': 'testuser2@test.com',
            'password': ''
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_password_short(self) -> None:
        """Test that user registration fails when the password is too short."""
        # password too short
        data = {
            'username': 'testuser2',
            'email': 'testuser2@test.com',
            'password': 'passwor'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_register_fail_password_long(self) -> None:
        """Test that user registration fails when the password is too long."""
        # password too long
        data = {
            'username': 'testuser2',
            'email': 'testuser2@test.com',
            'password': 'password1234567890123456789012345'
        }
        response = self.client.post(self.user_register_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_login_success(self) -> None:
        """Test that a user can log in successfully."""
        # valid user data
        data = {
            'username': 'testuser',
            'password': 'password1234'
        }
        response = self.client.post(self.user_login_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['username'], data['username'])
        self.assertTrue('token' in response.data)
        self.assertFalse('password' in response.data)

    def test_user_login_fail_username_empty(self) -> None:
        """Test that user login fails when the username is empty."""
        # username empty
        data = {
            'username': '',
            'password': 'password1234'
        }
        response = self.client.post(self.user_login_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_login_fail_username_invalid(self) -> None:
        """Test that user login fails when the username does not exist."""
        # username invalid
        data = {
            'username': 'testuser2',
            'password': 'password1234'
        }
        response = self.client.post(self.user_login_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_login_fail_password_empty(self) -> None:
        """Test that user login fails when the password is empty."""
        # password empty
        data = {
            'username': 'testuser',
            'password': ''
        }
        response = self.client.post(self.user_login_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_user_login_fail_password_invalid(self) -> None:
        """Test that user login fails when the password is wrong."""
        # password invalid
        data = {
            'username': 'testuser',
            'password': 'password12345'
        }
        response = self.client.post(self.user_login_url, data, format='json')
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
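Every failure case above still expects User.objects.count() == 1, which implies the test class registers exactly one account before each test. A minimal sketch of that scaffolding, assuming DRF's APITestCase; the URL route names 'user-register' and 'user-login' are hypothetical, while the credentials are the ones the tests themselves rely on:

from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase

User = get_user_model()


class UserAuthTests(APITestCase):
    def setUp(self) -> None:
        self.user_register_url = reverse('user-register')  # hypothetical route name
        self.user_login_url = reverse('user-login')        # hypothetical route name
        # One pre-created account, so every failed registration still
        # leaves User.objects.count() at exactly 1.
        User.objects.create_user(
            username='testuser',
            email='testuser@test.com',
            password='password1234',
        )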
| 37.046693
| 97
| 0.631026
| 1,063
| 9,521
| 5.475071
| 0.084666
| 0.095361
| 0.079038
| 0.046735
| 0.853093
| 0.830412
| 0.802062
| 0.784364
| 0.736942
| 0.733849
| 0
| 0.029966
| 0.25344
| 9,521
| 256
| 98
| 37.191406
| 0.788829
| 0.149669
| 0
| 0.642424
| 0
| 0
| 0.135145
| 0.0124
| 0
| 0
| 0
| 0
| 0.254545
| 1
| 0.109091
| false
| 0.151515
| 0.024242
| 0
| 0.139394
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c8bb54863d23f94231a37f682012bfe6ca997e70
| 191
|
py
|
Python
|
src/pathlibext/match.py
|
gpcimino/pathlibext
|
10c3d102a5d8a074a9a2f5cdb0356d0187b3eada
|
[
"MIT"
] | 2
|
2021-12-13T21:32:50.000Z
|
2021-12-23T11:17:06.000Z
|
src/pathlibext/match.py
|
gpcimino/pathlibext
|
10c3d102a5d8a074a9a2f5cdb0356d0187b3eada
|
[
"MIT"
] | null | null | null |
src/pathlibext/match.py
|
gpcimino/pathlibext
|
10c3d102a5d8a074a9a2f5cdb0356d0187b3eada
|
[
"MIT"
] | null | null | null |
import fnmatch

def match(self, wildcards: str) -> bool:
    # Wildcard match against the whole path string; case handling
    # follows os.path.normcase (case-insensitive on Windows).
    return fnmatch.fnmatch(str(self), wildcards)

def matchcase(self, wildcards: str) -> bool:
    # As match(), but always case-sensitive regardless of platform.
    return fnmatch.fnmatchcase(str(self), wildcards)
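Both helpers take self as their first argument, which suggests they are meant to be grafted onto pathlib.Path. A minimal usage sketch under that assumption; note that binding match this way shadows pathlib's built-in, glob-based Path.match, and that fnmatch's '*' also crosses '/' separators:

from pathlib import Path

# Hypothetical monkey-patching; the package may attach these differently.
Path.match = match
Path.matchcase = matchcase

p = Path('/var/log/Syslog.TXT')
print(p.matchcase('*.TXT'))  # True: '*' matches the leading directories too
print(p.matchcase('*.txt'))  # False: exact case required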
| 19.1
| 52
| 0.73822
| 24
| 191
| 5.875
| 0.416667
| 0.368794
| 0.22695
| 0.312057
| 0.411348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146597
| 191
| 9
| 53
| 21.222222
| 0.865031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
c8c7e389569b98370469bf5c4c085d65118a2712
| 1,040
|
py
|
Python
|
source/recommendation/matching/crawler.py
|
ieesejin/graduation-project
|
90e3800a61e138c941e8d4842b806ce23c4867b0
|
[
"Apache-2.0"
] | null | null | null |
source/recommendation/matching/crawler.py
|
ieesejin/graduation-project
|
90e3800a61e138c941e8d4842b806ce23c4867b0
|
[
"Apache-2.0"
] | null | null | null |
source/recommendation/matching/crawler.py
|
ieesejin/graduation-project
|
90e3800a61e138c941e8d4842b806ce23c4867b0
|
[
"Apache-2.0"
] | null | null | null |
from icrawler.builtin import GoogleImageCrawler

# Earlier crawl runs, kept commented out for reference. The keywords are
# Korean fashion search terms: '대학생룩' = "college student look",
# '결혼식룩' = "wedding look", '힙합룩'/'힙합패션' = "hip-hop look/fashion",
# '패션테러' = "fashion disaster".
# google_crawler = GoogleImageCrawler(parser_threads=2, downloader_threads=4, storage={'root_dir': '대학생룩'})
# google_crawler.crawl(keyword='대학생룩', max_num=500, min_size=(200, 200), max_size=None)
# google_crawler = GoogleImageCrawler(parser_threads=2, downloader_threads=4, storage={'root_dir': '결혼식룩'})
# google_crawler.crawl(keyword='결혼식룩', max_num=500, min_size=(200, 200), max_size=None)
# google_crawler = GoogleImageCrawler(parser_threads=2, downloader_threads=4, storage={'root_dir': '힙합룩'})
# google_crawler.crawl(keyword='힙합패션', max_num=500, min_size=(200, 200), max_size=None)
# google_crawler = GoogleImageCrawler(parser_threads=2, downloader_threads=4, storage={'root_dir': '패션테러'})
# google_crawler.crawl(keyword='패션테러', max_num=500, min_size=(200, 200), max_size=None)

# Active run: download up to 500 images for '점프샷' ("jump shot") into ./점프샷
google_crawler = GoogleImageCrawler(parser_threads=2, downloader_threads=4, storage={'root_dir': '점프샷'})
google_crawler.crawl(keyword='점프샷', max_num=500, min_size=(200, 200), max_size=None)
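Each commented-out run above repeats the same two lines with a different keyword; a loop would express the same series more compactly. A sketch reusing only the calls already present in this file (it normalizes the one mismatch where the hip-hop run stored into '힙합룩' while searching '힙합패션'):

# One crawler per keyword, each downloading into a folder named after it.
for kw in ['대학생룩', '결혼식룩', '힙합패션', '패션테러', '점프샷']:
    crawler = GoogleImageCrawler(parser_threads=2, downloader_threads=4,
                                 storage={'root_dir': kw})
    crawler.crawl(keyword=kw, max_num=500, min_size=(200, 200), max_size=None)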
| 49.52381
| 107
| 0.764423
| 145
| 1,040
| 5.206897
| 0.206897
| 0.172185
| 0.205298
| 0.245033
| 0.728477
| 0.728477
| 0.728477
| 0.728477
| 0.728477
| 0.728477
| 0
| 0.057232
| 0.075962
| 1,040
| 20
| 108
| 52
| 0.728408
| 0.739423
| 0
| 0
| 0
| 0
| 0.054902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
a90ba8d3ce5e7121c0a0a29902abccaf77526b2b
| 412
|
py
|
Python
|
TripMapR/travelogue/tests/__init__.py
|
rahulvgmail/TripMapR
|
d2dc4770fc87fdefda3fe9effdcd4683a8abebaa
|
[
"BSD-3-Clause"
] | null | null | null |
TripMapR/travelogue/tests/__init__.py
|
rahulvgmail/TripMapR
|
d2dc4770fc87fdefda3fe9effdcd4683a8abebaa
|
[
"BSD-3-Clause"
] | null | null | null |
TripMapR/travelogue/tests/__init__.py
|
rahulvgmail/TripMapR
|
d2dc4770fc87fdefda3fe9effdcd4683a8abebaa
|
[
"BSD-3-Clause"
] | null | null | null |
from travelogue.tests.test_effect import *
from travelogue.tests.test_travelogue import *
from travelogue.tests.test_photo import *
from travelogue.tests.test_photosize import *
from travelogue.tests.test_resize import *
from travelogue.tests.test_views_photo import *
from travelogue.tests.test_views_travelogue import *
from travelogue.tests.test_sitemap import *
from travelogue.tests.test_zipupload import *
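These star-imports pull every TestCase subclass into the travelogue.tests package namespace so the test runner can discover them. A hedged alternative, using unittest's standard load_tests protocol instead of star-imports (a sketch only; the project itself does not do this):

import unittest

def load_tests(loader, standard_tests, pattern):
    # Explicitly discover all test_*.py modules in this package,
    # avoiding the namespace pollution of star-imports.
    package_tests = loader.discover(start_dir=__path__[0], pattern='test_*.py')
    standard_tests.addTests(package_tests)
    return standard_tests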
| 41.2
| 52
| 0.847087
| 56
| 412
| 6.035714
| 0.214286
| 0.372781
| 0.505917
| 0.612426
| 0.804734
| 0.547337
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087379
| 412
| 9
| 53
| 45.777778
| 0.898936
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a9680d337f0f0a203adabc63d1b2caa50877764f
| 137
|
py
|
Python
|
flash/text/seq2seq/summarization/__init__.py
|
alvin-chang/lightning-flash
|
481d4d369ff0a5d8c2b2d9e4970c5608a92b3ff5
|
[
"Apache-2.0"
] | 2
|
2021-06-25T08:42:36.000Z
|
2021-06-25T08:49:29.000Z
|
flash/text/seq2seq/summarization/__init__.py
|
alvin-chang/lightning-flash
|
481d4d369ff0a5d8c2b2d9e4970c5608a92b3ff5
|
[
"Apache-2.0"
] | null | null | null |
flash/text/seq2seq/summarization/__init__.py
|
alvin-chang/lightning-flash
|
481d4d369ff0a5d8c2b2d9e4970c5608a92b3ff5
|
[
"Apache-2.0"
] | null | null | null |
from flash.text.seq2seq.summarization.data import SummarizationData
from flash.text.seq2seq.summarization.model import SummarizationTask
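This __init__ exists purely to re-export the two public names, so callers can import from the sub-package root instead of spelling out the concrete modules:

# With the re-exports in place, both names resolve from the package itself:
from flash.text.seq2seq.summarization import SummarizationData, SummarizationTask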
| 45.666667
| 68
| 0.883212
| 16
| 137
| 7.5625
| 0.625
| 0.14876
| 0.214876
| 0.330579
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015504
| 0.058394
| 137
| 2
| 69
| 68.5
| 0.922481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|