hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f20f1a1e45133c9d8d197e7d555d5f99162f38f4
| 138
|
py
|
Python
|
math/algebra.py
|
Rin-The-QT-Bunny/Kabalium
|
ebb2ce611b715c7f9e212426e2d914418445d3b6
|
[
"CC0-1.0"
] | null | null | null |
math/algebra.py
|
Rin-The-QT-Bunny/Kabalium
|
ebb2ce611b715c7f9e212426e2d914418445d3b6
|
[
"CC0-1.0"
] | null | null | null |
math/algebra.py
|
Rin-The-QT-Bunny/Kabalium
|
ebb2ce611b715c7f9e212426e2d914418445d3b6
|
[
"CC0-1.0"
] | null | null | null |
import sympy as sp
import numpy as np
from sympy.abc import * # easier for utils
import matplotlib.pyplot as plt # plot the graph etc
| 27.6
| 52
| 0.753623
| 24
| 138
| 4.333333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210145
| 138
| 5
| 52
| 27.6
| 0.954128
| 0.253623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f21059719d61b8751f5dd5dd128641f41e2eebac
| 601
|
py
|
Python
|
pypy/lang/prolog/builtin/__init__.py
|
camillobruni/pygirl
|
ddbd442d53061d6ff4af831c1eab153bcc771b5a
|
[
"MIT"
] | 12
|
2016-01-06T07:10:28.000Z
|
2021-05-13T23:02:02.000Z
|
pypy/lang/prolog/builtin/__init__.py
|
woodrow/pyoac
|
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
|
[
"MIT"
] | null | null | null |
pypy/lang/prolog/builtin/__init__.py
|
woodrow/pyoac
|
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
|
[
"MIT"
] | 2
|
2016-07-29T07:09:50.000Z
|
2016-10-16T08:50:26.000Z
|
# all builtins
builtins = {}
builtins_list = []
# imports to register builtins
import pypy.lang.prolog.builtin.allsolution
import pypy.lang.prolog.builtin.arithmeticbuiltin
import pypy.lang.prolog.builtin.atomconstruction
import pypy.lang.prolog.builtin.control
import pypy.lang.prolog.builtin.database
import pypy.lang.prolog.builtin.exception
import pypy.lang.prolog.builtin.formatting
import pypy.lang.prolog.builtin.metacall
import pypy.lang.prolog.builtin.parseraccess
import pypy.lang.prolog.builtin.source
import pypy.lang.prolog.builtin.termconstruction
import pypy.lang.prolog.builtin.unify
| 33.388889
| 49
| 0.84193
| 81
| 601
| 6.234568
| 0.283951
| 0.237624
| 0.332673
| 0.475248
| 0.641584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064892
| 601
| 17
| 50
| 35.352941
| 0.898577
| 0.06822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f21fa0d787ac0d897295c502cb9c8370bb0e16ab
| 239
|
py
|
Python
|
central_nodes.py
|
arielbro/attractor_learning
|
33693847f319d294a61a00f0d5c2c6457270fbfe
|
[
"MIT"
] | null | null | null |
central_nodes.py
|
arielbro/attractor_learning
|
33693847f319d294a61a00f0d5c2c6457270fbfe
|
[
"MIT"
] | null | null | null |
central_nodes.py
|
arielbro/attractor_learning
|
33693847f319d294a61a00f0d5c2c6457270fbfe
|
[
"MIT"
] | null | null | null |
"""
Code for identification of central nodes in a network.
Let G_v be the network where f_v is a variable, then a node is "significant" if #ATTRACTORS(G_v) > #ATTRACTORS(G),
and "central" if #ATTRACTORS(G_v) = max_u(#ATTRACTORS(G_u))
"""
| 34.142857
| 114
| 0.723849
| 43
| 239
| 3.883721
| 0.604651
| 0.263473
| 0.155689
| 0.167665
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154812
| 239
| 6
| 115
| 39.833333
| 0.826733
| 0.958159
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4860e8cfec6aff1746e7d168c3631d24d9b65a3b
| 74
|
py
|
Python
|
dynamo/pp.py
|
xing-lab-pitt/dynamo-release
|
76c1f2a270dd6722b88f4700aac1a1a725a0c261
|
[
"BSD-3-Clause"
] | 236
|
2019-07-09T22:06:21.000Z
|
2022-03-31T17:56:07.000Z
|
dynamo/pp.py
|
xing-lab-pitt/dynamo-release
|
76c1f2a270dd6722b88f4700aac1a1a725a0c261
|
[
"BSD-3-Clause"
] | 115
|
2019-07-12T19:06:21.000Z
|
2022-03-31T17:34:18.000Z
|
dynamo/pp.py
|
xing-lab-pitt/dynamo-release
|
76c1f2a270dd6722b88f4700aac1a1a725a0c261
|
[
"BSD-3-Clause"
] | 34
|
2019-07-10T03:34:04.000Z
|
2022-03-22T12:44:22.000Z
|
"""Mapping Vector Field of Single Cells
"""
from .preprocessing import *
| 14.8
| 39
| 0.72973
| 9
| 74
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 4
| 40
| 18.5
| 0.870968
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
48798b7ee259c23f43d4f3d19f0a1f612b0bce0a
| 98
|
py
|
Python
|
ReconstructOrder/workflow/__init__.py
|
czbiohub/reconstruct-order
|
e729ae3871aea0a5ec2d42744a9448c7f0a93037
|
[
"Unlicense"
] | 6
|
2019-10-30T23:00:01.000Z
|
2021-03-02T19:09:07.000Z
|
ReconstructOrder/workflow/__init__.py
|
czbiohub/ReconstructOrder
|
e729ae3871aea0a5ec2d42744a9448c7f0a93037
|
[
"Unlicense"
] | 14
|
2019-07-08T22:51:29.000Z
|
2019-07-13T15:44:01.000Z
|
ReconstructOrder/workflow/__init__.py
|
mehta-lab/reconstruct-order
|
e729ae3871aea0a5ec2d42744a9448c7f0a93037
|
[
"Unlicense"
] | 2
|
2020-05-02T23:28:36.000Z
|
2020-07-16T23:46:46.000Z
|
# bchhun, {4/29/19}
from . import multiDimProcess
from .reconstructBatch import reconstruct_batch
| 24.5
| 47
| 0.806122
| 12
| 98
| 6.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057471
| 0.112245
| 98
| 4
| 47
| 24.5
| 0.83908
| 0.173469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6f9113a70800ce3b6f5efa27098605d5c484952e
| 14,028
|
py
|
Python
|
snapshotServer/tests/views/test_FileUploadView.py
|
bhecquet/seleniumRobot-server
|
b5930a21a25d63f2071dd57a55855b62808800d1
|
[
"Apache-2.0"
] | null | null | null |
snapshotServer/tests/views/test_FileUploadView.py
|
bhecquet/seleniumRobot-server
|
b5930a21a25d63f2071dd57a55855b62808800d1
|
[
"Apache-2.0"
] | 95
|
2017-05-04T09:00:52.000Z
|
2022-03-11T23:19:20.000Z
|
snapshotServer/tests/views/test_FileUploadView.py
|
bhecquet/seleniumRobot-server
|
b5930a21a25d63f2071dd57a55855b62808800d1
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on 15 mai 2017
@author: behe
'''
import datetime
import os
from django.urls.base import reverse
import pytz
from rest_framework.test import APITestCase
from snapshotServer.tests import authenticate_test_client_for_api
from snapshotServer.models import TestCase, TestStep, TestSession, \
TestEnvironment, Version, Snapshot, TestCaseInSession, Application, \
StepResult
from django.conf import settings
class TestFileUploadView(APITestCase):
fixtures = ['snapshotServer.yaml']
media_dir = settings.MEDIA_ROOT + os.sep + 'documents'
def setUp(self):
authenticate_test_client_for_api(self.client)
# test in a version
self.testCase = TestCase(name='test upload', application=Application.objects.get(id=1))
self.testCase.save()
self.step1 = TestStep.objects.get(id=1)
self.session1 = TestSession(sessionId="8888", date=datetime.datetime(2017, 5, 7, tzinfo=pytz.UTC), browser="firefox", version=Version.objects.get(pk=1), environment=TestEnvironment.objects.get(id=1), ttl=datetime.timedelta(0))
self.session1.save()
self.tcs1 = TestCaseInSession(testCase=self.testCase, session=self.session1)
self.tcs1.save()
self.sr1 = StepResult(step=self.step1, testCase=self.tcs1, result=True)
self.sr1.save()
self.session_same_env = TestSession(sessionId="8889", date=datetime.datetime(2017, 5, 7, tzinfo=pytz.UTC), browser="firefox", version=Version.objects.get(pk=1), environment=TestEnvironment.objects.get(id=1), ttl=datetime.timedelta(0))
self.session_same_env.save()
self.tcs_same_env = TestCaseInSession(testCase=self.testCase, session=self.session_same_env)
self.tcs_same_env.save()
self.step_result_same_env = StepResult(step=self.step1, testCase=self.tcs_same_env, result=True)
self.step_result_same_env.save()
self.session_other_env = TestSession(sessionId="8890", date=datetime.datetime(2017, 5, 7, tzinfo=pytz.UTC), browser="firefox", version=Version.objects.get(pk=1), environment=TestEnvironment.objects.get(id=2), ttl=datetime.timedelta(0))
self.session_other_env.save()
self.tcs_other_env = TestCaseInSession(testCase=self.testCase, session=self.session_other_env)
self.tcs_other_env.save()
self.step_result_other_env = StepResult(step=self.step1, testCase=self.tcs_other_env, result=True)
self.step_result_other_env.save()
self.session_other_browser = TestSession(sessionId="8891", date=datetime.datetime(2017, 5, 7, tzinfo=pytz.UTC), browser="chrome", version=Version.objects.get(pk=1), environment=TestEnvironment.objects.get(id=1), ttl=datetime.timedelta(0))
self.session_other_browser.save()
self.tcs_other_browser = TestCaseInSession(testCase=self.testCase, session=self.session_other_browser)
self.tcs_other_browser.save()
self.step_result_other_browser = StepResult(step=self.step1, testCase=self.tcs_other_browser, result=True)
self.step_result_other_browser.save()
def tearDown(self):
"""
Remove generated files
"""
super().tearDown()
for f in os.listdir(self.media_dir):
if f.startswith('engie'):
os.remove(self.media_dir + os.sep + f)
def test_post_snapshot_no_ref(self):
"""
Check a reference is created when non is found
"""
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot, "the uploaded snapshot should be recorded")
self.assertTrue(uploaded_snapshot.computed)
self.assertEqual(uploaded_snapshot.diffTolerance, 0.0)
def test_post_snapshot_no_ref_with_threshold(self):
"""
Check a reference is created when non is found
"""
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true', 'diffTolerance': 1.5})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot, "the uploaded snapshot should be recorded")
self.assertTrue(uploaded_snapshot.computed)
self.assertEqual(uploaded_snapshot.diffTolerance, 1.5)
def test_post_snapshot_existing_ref(self):
"""
Check we find the reference snapshot when it exists in the same version / same name
"""
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
uploaded_snapshot_1 = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.step_result_same_env.id, 'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot_2 = Snapshot.objects.filter(stepResult__testCase=self.tcs_same_env, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot_2, "the uploaded snapshot should be recorded")
self.assertEqual(uploaded_snapshot_2.refSnapshot, uploaded_snapshot_1)
# both snapshots are marked as computed as they have been uploaded
self.assertTrue(uploaded_snapshot_1.computed)
self.assertTrue(uploaded_snapshot_2.computed)
def test_post_snapshot_multiple_existing_ref(self):
"""
issue #61: Check that when multiple references exist for the same version / name / env / ..., we take the last one.
"""
# upload first ref
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
uploaded_snapshot_1 = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
# upload second snapshot and make it a reference
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
uploaded_snapshot_2 = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
uploaded_snapshot_2.refSnapshot = None
uploaded_snapshot_2.save()
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.step_result_same_env.id, 'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot_3 = Snapshot.objects.filter(stepResult__testCase=self.tcs_same_env, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot_3, "the uploaded snapshot should be recorded")
self.assertEqual(uploaded_snapshot_3.refSnapshot, uploaded_snapshot_2, "last snapshot should take the most recent reference snapshot available")
def test_post_snapshot_existing_ref_other_env(self):
"""
Check we cannot find the reference snapshot when it exists in the same version / same browser / same name but for a different environment
"""
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
uploaded_snapshot_1 = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.step_result_other_env.id, 'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot_2 = Snapshot.objects.filter(stepResult__testCase=self.tcs_other_env, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot_2, "the uploaded snapshot should be recorded")
# the uploaded snapshot should not have been associated to 'uploaded_snapshot_1' as environment is different
self.assertIsNone(uploaded_snapshot_2.refSnapshot)
def test_post_snapshot_existing_ref_other_browser(self):
"""
Check we cannot find the reference snapshot when it exists in the same version / same environment / same name but for a different browser
"""
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
uploaded_snapshot_1 = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.step_result_other_browser.id, 'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot_2 = Snapshot.objects.filter(stepResult__testCase=self.tcs_other_browser, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot_2, "the uploaded snapshot should be recorded")
# the uploaded snapshot should not have been associated to 'uploaded_snapshot_1' as browser is different
self.assertIsNone(uploaded_snapshot_2.refSnapshot)
def test_post_snapshot_existing_ref_in_previous_version(self):
"""
Check that we search for a reference in a previous version if none is found in the current one
"""
# same as self.testCase in a greater version
session3 = TestSession(sessionId="8890", date=datetime.datetime(2017, 5, 7, tzinfo=pytz.UTC), browser="firefox", version=Version.objects.get(pk=2), environment=TestEnvironment.objects.get(id=1), ttl=datetime.timedelta(0))
session3.save()
tcs3 = TestCaseInSession(testCase=self.testCase, session=session3)
tcs3.save()
tcs3.testSteps.set([TestStep.objects.get(id=1)])
tcs3.save()
sr3 = StepResult(step=TestStep.objects.get(id=1), testCase=tcs3, result=True)
sr3.save()
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'name': 'img', 'compare': 'true'})
uploaded_snapshot_1 = Snapshot.objects.filter(stepResult__testCase=self.tcs1, stepResult__step__id=1).last()
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': sr3.id, 'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 201, 'status code should be 201: ' + str(response.content))
uploaded_snapshot_2 = Snapshot.objects.filter(stepResult__testCase=tcs3, stepResult__step__id=1).last()
self.assertIsNotNone(uploaded_snapshot_2, "the uploaded snapshot should be recorded")
self.assertEqual(uploaded_snapshot_2.refSnapshot, uploaded_snapshot_1)
def test_post_snapshot_no_picture(self):
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 500, 'status code should be 500')
def test_post_snapshot_missing_step(self):
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'image': fp, 'name': 'img', 'compare': 'true'})
self.assertEqual(response.status_code, 500, 'status code should be 500')
def test_post_snapshot_missing_name(self):
with open('snapshotServer/tests/data/engie.png', 'rb') as fp:
response = self.client.post(reverse('upload', args=['img']), data={'stepResult': self.sr1.id, 'image': fp, 'compare': 'true'})
self.assertEqual(response.status_code, 500, 'status code should be 500')
| 63.475113
| 247
| 0.659182
| 1,707
| 14,028
| 5.24546
| 0.110135
| 0.083985
| 0.032276
| 0.037525
| 0.800089
| 0.75899
| 0.725821
| 0.716663
| 0.68193
| 0.681818
| 0
| 0.020833
| 0.216424
| 14,028
| 220
| 248
| 63.763636
| 0.793759
| 0.080268
| 0
| 0.388489
| 0
| 0
| 0.154598
| 0.042054
| 0
| 0
| 0
| 0
| 0.201439
| 1
| 0.086331
| false
| 0
| 0.057554
| 0
| 0.165468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6f94b272cd77450f0acc4204b1810e8e9b92e3ce
| 25
|
py
|
Python
|
onion.py
|
tamittal/OnionPy
|
d7ff17db449645e57b55a9456bee8e25e051d90c
|
[
"Unlicense"
] | null | null | null |
onion.py
|
tamittal/OnionPy
|
d7ff17db449645e57b55a9456bee8e25e051d90c
|
[
"Unlicense"
] | null | null | null |
onion.py
|
tamittal/OnionPy
|
d7ff17db449645e57b55a9456bee8e25e051d90c
|
[
"Unlicense"
] | null | null | null |
print("hellow worlddd")
| 12.5
| 24
| 0.72
| 3
| 25
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 25
| 1
| 25
| 25
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
6f96d6e72e9bd31e29365f0d75818fb768a8473a
| 183
|
py
|
Python
|
_common/daemon.py
|
aymazon/project
|
f7b0b5e031f43c60ca00774939e35c46a08dc73d
|
[
"MIT"
] | null | null | null |
_common/daemon.py
|
aymazon/project
|
f7b0b5e031f43c60ca00774939e35c46a08dc73d
|
[
"MIT"
] | null | null | null |
_common/daemon.py
|
aymazon/project
|
f7b0b5e031f43c60ca00774939e35c46a08dc73d
|
[
"MIT"
] | null | null | null |
from common import monitor_process
from service import count_daemon_process
def main() -> None:
monitor_process(count_daemon_process())
if __name__ == '__main__':
main()
| 15.25
| 43
| 0.743169
| 23
| 183
| 5.304348
| 0.565217
| 0.229508
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169399
| 183
| 11
| 44
| 16.636364
| 0.802632
| 0
| 0
| 0
| 0
| 0
| 0.043716
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6fa1d5e76dd18a8d4ae5e3d6d686c960ccc5320b
| 26
|
py
|
Python
|
rdkit/sping/tests/__init__.py
|
kazuyaujihara/rdkit
|
06027dcd05674787b61f27ba46ec0d42a6037540
|
[
"BSD-3-Clause"
] | 1,609
|
2015-01-05T02:41:13.000Z
|
2022-03-30T21:57:24.000Z
|
rdkit/sping/tests/__init__.py
|
kazuyaujihara/rdkit
|
06027dcd05674787b61f27ba46ec0d42a6037540
|
[
"BSD-3-Clause"
] | 3,412
|
2015-01-06T12:13:33.000Z
|
2022-03-31T17:25:41.000Z
|
rdkit/sping/tests/__init__.py
|
bp-kelley/rdkit
|
e0de7c9622ce73894b1e7d9568532f6d5638058a
|
[
"BSD-3-Clause"
] | 811
|
2015-01-11T03:33:48.000Z
|
2022-03-28T11:57:49.000Z
|
# dummy package indicator
| 13
| 25
| 0.807692
| 3
| 26
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.954545
| 0.884615
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6fc6ee5be846c6fc6f8165d09164bfbca3f52e26
| 52
|
py
|
Python
|
model/__init__.py
|
czc567/UniGNN
|
bbb061f393b847ff6c7c20cab9e1ecb8f1c3eb96
|
[
"MIT"
] | 22
|
2021-05-04T04:42:05.000Z
|
2022-03-28T07:03:20.000Z
|
model/__init__.py
|
czc567/UniGNN
|
bbb061f393b847ff6c7c20cab9e1ecb8f1c3eb96
|
[
"MIT"
] | 1
|
2021-12-05T06:05:53.000Z
|
2021-12-08T08:34:48.000Z
|
model/__init__.py
|
czc567/UniGNN
|
bbb061f393b847ff6c7c20cab9e1ecb8f1c3eb96
|
[
"MIT"
] | 5
|
2021-05-22T01:54:52.000Z
|
2022-03-28T07:03:28.000Z
|
from .UniGNN import *
from .HyperGCN import HyperGCN
| 26
| 30
| 0.807692
| 7
| 52
| 6
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134615
| 52
| 2
| 30
| 26
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6fdcd75e4fc251bfa53b3010419325dcff4af3fd
| 22
|
py
|
Python
|
wNMF/__init__.py
|
nathanwalker-sp/weighted-nmf
|
b0d080ee128e44ed9d2c19b23687e74183589f54
|
[
"MIT"
] | null | null | null |
wNMF/__init__.py
|
nathanwalker-sp/weighted-nmf
|
b0d080ee128e44ed9d2c19b23687e74183589f54
|
[
"MIT"
] | null | null | null |
wNMF/__init__.py
|
nathanwalker-sp/weighted-nmf
|
b0d080ee128e44ed9d2c19b23687e74183589f54
|
[
"MIT"
] | null | null | null |
from .wNMF import wNMF
| 22
| 22
| 0.818182
| 4
| 22
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
6fe5dccd4cc54792f3b1bcd0bca002601df276db
| 45
|
py
|
Python
|
crawl/doc_indexer.py
|
SeeYouMonday/SeeYouMonday
|
d13d97f6ba4aa6374d1b30e0669476cbad2f1c08
|
[
"MIT"
] | 2
|
2018-03-20T09:29:40.000Z
|
2019-08-23T02:44:23.000Z
|
crawl/doc_indexer.py
|
SeeYouMonday/SeeYouMonday
|
d13d97f6ba4aa6374d1b30e0669476cbad2f1c08
|
[
"MIT"
] | 10
|
2018-03-21T22:53:49.000Z
|
2021-03-31T18:47:41.000Z
|
crawl/doc_indexer.py
|
SeeYouMonday/SeeYouMonday
|
d13d97f6ba4aa6374d1b30e0669476cbad2f1c08
|
[
"MIT"
] | 4
|
2018-03-21T22:49:37.000Z
|
2019-01-29T05:19:21.000Z
|
from nltk.tokenize.api import StringTokenizer
| 45
| 45
| 0.888889
| 6
| 45
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 45
| 1
| 45
| 45
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d20dfc431dc11c883c6a101af8de601f9148c665
| 1,266
|
py
|
Python
|
tests/schema/product/gql/queries/__init__.py
|
simonsobs/acondbs
|
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
|
[
"MIT"
] | null | null | null |
tests/schema/product/gql/queries/__init__.py
|
simonsobs/acondbs
|
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
|
[
"MIT"
] | 24
|
2020-04-02T19:29:07.000Z
|
2022-03-08T03:05:43.000Z
|
tests/schema/product/gql/queries/__init__.py
|
simonsobs/acondbs
|
6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6
|
[
"MIT"
] | 1
|
2020-04-08T15:48:28.000Z
|
2020-04-08T15:48:28.000Z
|
# fmt: off
from .query_all_fields import QUERY_ALL_FIELDS # noqa: F401
from .query_all_products import QUERY_ALL_PRODUCTS # noqa: F401
from .query_all_products_shallow import QUERY_ALL_PRODUCTS_SHALLOW # noqa: F401
from .query_all_products_total_count import QUERY_ALL_PRODUCTS_TOTAL_COUNT # noqa: F401
from .query_all_product_file_paths import QUERY_ALL_PRODUCT_FILE_PATHS # noqa: F401
from .query_all_product_relations import QUERY_ALL_PRODUCT_RELATIONS # noqa: F401
from .query_all_product_relations_total_count import QUERY_ALL_PRODUCT_RELATIONS_TOTAL_COUNT # noqa: F401
from .query_all_product_relation_types import QUERY_ALL_PRODUCT_RELATION_TYPES # noqa: F401
from .query_all_product_relation_types_total_count import QUERY_ALL_PRODUCT_RELATION_TYPES_TOTAL_COUNT # noqa: F401
from .query_all_product_types import QUERY_ALL_PRODUCT_TYPES # noqa: F401
from .query_field import QUERY_FIELD # noqa: F401
from .query_product import QUERY_PRODUCT # noqa: F401
from .query_product_shallow import QUERY_PRODUCT_SHALLOW # noqa: F401
from .query_product_relation import QUERY_PRODUCT_RELATION # noqa: F401
from .query_product_relation_type import QUERY_PRODUCT_RELATION_TYPE # noqa: F401
from .query_product_type import QUERY_PRODUCT_TYPE # noqa: F401
| 70.333333
| 116
| 0.85624
| 192
| 1,266
| 5.15625
| 0.109375
| 0.161616
| 0.181818
| 0.257576
| 0.738384
| 0.507071
| 0.276768
| 0.165657
| 0
| 0
| 0
| 0.042328
| 0.104265
| 1,266
| 17
| 117
| 74.470588
| 0.830688
| 0.14534
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d221e981f02e53a18689b9ce78658efe64ce373b
| 11,574
|
py
|
Python
|
authors/apps/authentication/tests/test_user_registration.py
|
andela/ah-backend-sparta
|
fcc394e486a736993702bfa1e6fd9e9b189b93ae
|
[
"BSD-3-Clause"
] | null | null | null |
authors/apps/authentication/tests/test_user_registration.py
|
andela/ah-backend-sparta
|
fcc394e486a736993702bfa1e6fd9e9b189b93ae
|
[
"BSD-3-Clause"
] | 11
|
2019-03-25T14:38:23.000Z
|
2019-04-18T08:02:10.000Z
|
authors/apps/authentication/tests/test_user_registration.py
|
andela/ah-backend-sparta
|
fcc394e486a736993702bfa1e6fd9e9b189b93ae
|
[
"BSD-3-Clause"
] | 5
|
2019-06-12T08:22:58.000Z
|
2020-02-07T08:26:37.000Z
|
"""
Module to test functionality to register a user
"""
from unittest.mock import patch
from rest_framework import status
from rest_framework.test import APITestCase
from .test_data import (
no_login_credentialds_data,
login_credentials_data,
empty_string_username,
empty_string_email,
empty_string_password,
invalid_email_data,
login_data,
test_user_data,
invalid_login_data,
login_data_miss_email,
login_data_miss_password,
empty_login_data_object,
auth_change_password,
test_user_data_password_change,
password_contain_spaces,
username_contain_spaces,
username_contains_special_characters
)
from .test_base import BaseTestCase
class TestUserRegistration(BaseTestCase):
    """
    Tests for the user registration, login, current-user (JWT) and
    account-verification endpoints.

    NOTE(review): the assertIn calls below pass the actual error message
    as the *member* and the expected text as the *container*, i.e. they
    check ``actual in expected``. That only passes when the actual
    message equals (or is a substring of) the expected text — the
    argument order looks reversed; confirm against the serializer
    messages before changing.
    """
    def test_register_a_user_with_no_data(self):
        """
        Method to test if posted user registration user object contains no data
        """
        response = self.client.post('/api/users/register/', no_login_credentialds_data, format='json')
        self.assertIn(response.data["errors"]["email"][0], 'This field is required.')
        self.assertIn(response.data["errors"]["username"][0], 'This field is required.')
        self.assertIn(response.data["errors"]["password"][0], 'This field is required.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_password_contains_spaces(self):
        """
        Method to test if password contains spaces
        """
        response = self.client.post('/api/users/register/', password_contain_spaces, format='json')
        self.assertIn(response.data["errors"]["password"][0], 'Password should not contain spaces')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_if_username_contains_spaces(self):
        """
        Method to test if username contains spaces
        """
        response = self.client.post('/api/users/register/', username_contain_spaces, format='json')
        self.assertIn(response.data["errors"]["username"][0], 'Username should not contain spaces')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_if_username_contains_special_charcters(self):
        """
        Method to test if username contains special characters
        """
        response = self.client.post('/api/users/register/', username_contains_special_characters, format='json')
        self.assertIn(response.data["errors"]["username"][0], 'should not contain special characters @_!#$%^&*()<>?/\\|}{~:')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_register_a_user_with_data(self):
        """
        Method to test if posted registration user object contains data
        """
        response = self.client.post('/api/users/register/', login_credentials_data, format='json')
        self.assertEqual(response.data["email"], 'testuser@gmail.com')
        self.assertEqual(response.data["username"], 'user')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # Registering the same credentials a second time must be rejected
        # with duplicate-email and duplicate-username errors.
        response1 = self.client.post('/api/users/register/', login_credentials_data, format='json')
        self.assertEqual(response1.data["errors"]["email"][0], 'Provided email address already exists, please provide a different one')
        self.assertEqual(response1.data["errors"]["username"][0], 'Provided username already exist, please provide a different one')
        self.assertEqual(response1.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_registered_with_empty_string_username(self):
        """
        Method to test if username is an empty string
        """
        response = self.client.post('/api/users/register/', empty_string_username, format='json')
        self.assertIn(response.data["errors"]["username"][0], '"This field may not be blank."')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_registered_with_empty_email_string(self):
        """
        Method to test if email is an empty string
        """
        response = self.client.post('/api/users/register/', empty_string_email, format='json')
        self.assertIn(response.data["errors"]["email"][0], '"This field may not be blank."')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_registered_with_empty_password_string(self):
        """
        Method to test if password is an empty string
        """
        response = self.client.post('/api/users/register/', empty_string_password, format='json')
        self.assertIn(response.data["errors"]["password"][0], 'This field may not be blank.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_registered_with_an_invalid_email(self):
        """
        Method to test if email is an invalid email
        """
        response = self.client.post('/api/users/register/', invalid_email_data , format='json')
        self.assertIn(response.data["errors"]["email"][0], 'Enter a valid email address.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_login_successfully_with_valid_data(self):
        """
        Method to test if user successfully logs in using valid credentials
        """
        response = self.client.post('/api/users/register/', login_credentials_data, format='json')
        # The account must be verified (via the registration token) before
        # login succeeds.
        self.client.get('/api/users/verify/'+"?token="+ response.data['token'])
        response = self.client.post('/api/users/login/', login_data, format='json')
        self.assertEqual(response.data["email"], 'testuser@gmail.com')
        self.assertEqual(response.data["username"], 'user')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_user_logged_in__with_invalid_data(self):
        """
        Method to test if user logs in using invalid credentials
        """
        self.client.post('/api/users/', login_credentials_data, format='json')
        response = self.client.post(
            '/api/users/login/',
            invalid_login_data, format='json'
        )
        self.assertIn(response.data["errors"]["error"][0], 'A user with this email and password was not found.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_log_in_credentials_miss_email(self):
        """
        Method to test if email is not added in the login required credentials
        """
        self.client.post('/api/users/', login_credentials_data, format='json')
        response = self.client.post('/api/users/login/', login_data_miss_email, format='json')
        self.assertIn(response.data["errors"]["email"][0], 'This field is required.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_log_in_credentials_miss_password(self):
        """
        Method to test if password is not added in the login required credentials
        """
        self.client.post('/api/users/', login_credentials_data, format='json')
        response = self.client.post('/api/users/login/', login_data_miss_password, format='json')
        self.assertIn(response.data["errors"]["password"][0], 'This field is required.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_user_log_in_credentials_miss_password_and_email(self):
        """
        Method to test if both email and password are missing from the
        login required credentials
        """
        self.client.post('/api/users/', login_credentials_data, format='json')
        response = self.client.post('/api/users/login/', empty_login_data_object, format='json')
        self.assertIn(response.data["errors"]["password"][0], 'This field is required.')
        self.assertIn(response.data["errors"]["email"][0], 'This field is required.')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_get_current_user_jwt(self):
        """
        Get current user from JWT
        """
        user_token = self.create_user(test_user_data)
        response2 = self.client.get('/api/user/', HTTP_AUTHORIZATION=user_token, format='json')
        self.assertEqual(response2.data["email"], test_user_data.get('user').get('email'))
        self.assertEqual(response2.data["username"], test_user_data.get('user').get('username'))
        self.assertEqual(response2.status_code, status.HTTP_200_OK)
    def test_change_password_user_jwt_authenticated(self):
        """
        change password of authenticated user
        """
        user_token = self.create_user(test_user_data)
        response2 = self.client.put(
            '/api/user/',
            test_user_data_password_change,
            HTTP_AUTHORIZATION=user_token,format='json'
        )
        self.assertEqual(response2.data["email"], test_user_data.get('user').get('email'))
        self.assertEqual(response2.data["username"], test_user_data.get('user').get('username'))
        self.assertEqual(response2.status_code, status.HTTP_200_OK)
    def test_change_password_user_jwt_authenticated_invalid_token(self):
        """
        change password attempt with an invalid JWT must be rejected
        """
        invalid_token = 'Bearer hgfgsdyuertgsdtyshjgsdjusdhghjsdyj'
        self.client.credentials(HTTP_AUTHORIZATION = invalid_token)
        response2 = self.client.put('/api/user/', auth_change_password,format='json')
        self.assertEqual(response2.data["detail"], 'Invalid authentication. Could not decode token.')
        # NOTE(review): the request/assert pair below duplicates the one
        # above — looks like a copy-paste leftover.
        response2 = self.client.put('/api/user/', auth_change_password,format='json')
        self.assertEqual(response2.data["detail"], 'Invalid authentication. Could not decode token.')
    def test_verify_user(self):
        """ tests if user can be verified on registration with token"""
        res = self.client.post('/api/users/register/', test_user_data, format='json')
        response = self.client.get(
            '/api/users/verify/'+"?token="+res.data['token']
        )
        self.assertEqual(
            response.data['Message'],
            'Account successfully verified, your free to now login'
        )
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK
        )
    def test_cannot_verify_user_with_invalid_token(self):
        """ tests if user cannot be verified with invalid token"""
        class MockUserToken:
            @classmethod
            def decode(cls, token, secret_key):
                return {
                    'id': 1,
                    'exp': 1333
                }
        with patch('authors.apps.authentication.views.jwt', new_callable=MockUserToken):
            response = self.client.get('/api/users/verify/', format='json')
            self.assertEqual(response.data['Message'], 'Something went wrong')
    def test_email_sent_on_registration(self):
        """
        NOTE(review): despite the name, this duplicates
        test_cannot_verify_user_with_invalid_token — it never asserts
        that an email was actually sent.
        """
        class MockUserToken:
            @classmethod
            def decode(cls, token, secret_key):
                return {
                    'id': 1,
                    'exp': 1333
                }
        with patch('authors.apps.authentication.views.jwt', new_callable=MockUserToken):
            response = self.client.get('/api/users/verify/', format='json')
            self.assertEqual(response.data['Message'], 'Something went wrong')
| 44.344828
| 135
| 0.665198
| 1,385
| 11,574
| 5.340794
| 0.111913
| 0.066919
| 0.041639
| 0.048263
| 0.788969
| 0.762471
| 0.737867
| 0.703799
| 0.670272
| 0.626876
| 0
| 0.010677
| 0.215051
| 11,574
| 260
| 136
| 44.515385
| 0.803522
| 0.104285
| 0
| 0.397436
| 0
| 0
| 0.19144
| 0.013092
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0.141026
| false
| 0.147436
| 0.032051
| 0.012821
| 0.205128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
d227731b7c77ae89b5f20649a54b3ca7ff3d4e9d
| 255
|
py
|
Python
|
main.py
|
timos09/flask_celojums
|
52a165854fd6ca39699c01c1cac422d870fff1bf
|
[
"MIT"
] | null | null | null |
main.py
|
timos09/flask_celojums
|
52a165854fd6ca39699c01c1cac422d870fff1bf
|
[
"MIT"
] | null | null | null |
main.py
|
timos09/flask_celojums
|
52a165854fd6ca39699c01c1cac422d870fff1bf
|
[
"MIT"
] | null | null | null |
from flask import Flask, render_template
app = Flask('app')
@app.route('/')
def index():
    """Serve the landing page."""
    page = render_template("index.html")
    return page
@app.route('/about')
def about():
    """Serve the about page."""
    page = render_template("about.html")
    return page
app.run(host='0.0.0.0', port=8080)
| 17
| 41
| 0.647059
| 37
| 255
| 4.378378
| 0.459459
| 0.259259
| 0.246914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 0.168627
| 255
| 15
| 42
| 17
| 0.726415
| 0
| 0
| 0
| 0
| 0
| 0.152893
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.222222
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
d2624bb03264af042bdf7d9030e57502d1c9c868
| 398
|
py
|
Python
|
feast_snowflake/__init__.py
|
swetha-sundar/feast-snowflake
|
21ff6b2a091429d8efbab839906fc4c30779bf45
|
[
"Apache-2.0"
] | null | null | null |
feast_snowflake/__init__.py
|
swetha-sundar/feast-snowflake
|
21ff6b2a091429d8efbab839906fc4c30779bf45
|
[
"Apache-2.0"
] | null | null | null |
feast_snowflake/__init__.py
|
swetha-sundar/feast-snowflake
|
21ff6b2a091429d8efbab839906fc4c30779bf45
|
[
"Apache-2.0"
] | null | null | null |
from .snowflake_offline import SnowflakeOfflineStore, SnowflakeOfflineStoreConfig
from .snowflake_source import SnowflakeOptions, SnowflakeSource
from .snowflake_online import SnowflakeOnlineStore, SnowflakeOnlineStoreConfig
# Public API of the package (same names, one per line for readable diffs).
__all__ = [
    "SnowflakeOptions",
    "SnowflakeSource",
    "SnowflakeOfflineStoreConfig",
    "SnowflakeOfflineStore",
    "SnowflakeOnlineStoreConfig",
    "SnowflakeOnlineStore",
]
| 56.857143
| 90
| 0.844221
| 25
| 398
| 13.16
| 0.52
| 0.118541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090452
| 398
| 6
| 91
| 66.333333
| 0.90884
| 0
| 0
| 0
| 0
| 0
| 0.31407
| 0.18593
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d277e91765d7e3a61be3dae63010fabf467856cd
| 87
|
py
|
Python
|
enthought/chaco/polygon_plot.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/chaco/polygon_plot.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/chaco/polygon_plot.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from chaco.polygon_plot import *
| 21.75
| 38
| 0.83908
| 12
| 87
| 5.583333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126437
| 87
| 3
| 39
| 29
| 0.881579
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
964372a93143a172282a70d508620a1e35cc28ad
| 164
|
py
|
Python
|
api/app/logger.py
|
VidroX/recommdo
|
fe518158b1a63225816054fb129f680e1d0c7d9c
|
[
"MIT"
] | null | null | null |
api/app/logger.py
|
VidroX/recommdo
|
fe518158b1a63225816054fb129f680e1d0c7d9c
|
[
"MIT"
] | null | null | null |
api/app/logger.py
|
VidroX/recommdo
|
fe518158b1a63225816054fb129f680e1d0c7d9c
|
[
"MIT"
] | null | null | null |
import logging
from app import settings
# Route log output through the serving process's error logger:
# uvicorn's in debug mode, gunicorn's otherwise.
_logger_name = 'uvicorn.error' if settings.DEBUG else 'gunicorn.error'
logger = logging.getLogger(_logger_name)
| 18.222222
| 48
| 0.743902
| 20
| 164
| 6.1
| 0.65
| 0.213115
| 0.360656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152439
| 164
| 8
| 49
| 20.5
| 0.877698
| 0
| 0
| 0
| 0
| 0
| 0.164634
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
96b9110f3efb34991561eb2d2198f54ee6ba3997
| 55
|
py
|
Python
|
src/eda.py
|
SoufianeDataFan/ml_project_template
|
d4cad3d0c5e4bd03f8acf27b79ef8734eadf745a
|
[
"MIT"
] | null | null | null |
src/eda.py
|
SoufianeDataFan/ml_project_template
|
d4cad3d0c5e4bd03f8acf27b79ef8734eadf745a
|
[
"MIT"
] | null | null | null |
src/eda.py
|
SoufianeDataFan/ml_project_template
|
d4cad3d0c5e4bd03f8acf27b79ef8734eadf745a
|
[
"MIT"
] | null | null | null |
from import_modules import *
from config_file import *
| 18.333333
| 28
| 0.818182
| 8
| 55
| 5.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 55
| 2
| 29
| 27.5
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
73acecc8d7a5d4daaf12e604c744e0b02b3d676b
| 227
|
py
|
Python
|
zombase/foreman.py
|
mozaiques/zombase
|
fc8dbfdad7cdb1975fac63030fc6f6c20b6df260
|
[
"MIT"
] | 3
|
2015-01-22T21:54:49.000Z
|
2022-02-13T14:22:00.000Z
|
zombase/foreman.py
|
mozaiques/zombase
|
fc8dbfdad7cdb1975fac63030fc6f6c20b6df260
|
[
"MIT"
] | null | null | null |
zombase/foreman.py
|
mozaiques/zombase
|
fc8dbfdad7cdb1975fac63030fc6f6c20b6df260
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from zombase.worker import RawWorker
class RawForeman(RawWorker):
    """Special worker to manage other workers."""

    def __init__(self, dbsession=None):
        """Initialize the foreman.

        :param dbsession: optional database session, forwarded unchanged
            to ``RawWorker``.
        """
        # Use super() instead of naming the base class explicitly: it is
        # the idiomatic form and stays correct under cooperative
        # multiple inheritance or a base-class rename.
        super().__init__(dbsession)
| 22.7
| 49
| 0.69163
| 26
| 227
| 5.730769
| 0.769231
| 0.107383
| 0.228188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005376
| 0.180617
| 227
| 9
| 50
| 25.222222
| 0.795699
| 0.273128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
73befbab8528eb5878bbf2b810e81b08e7ae9c0f
| 52
|
py
|
Python
|
bayesclumpy/__init__.py
|
aasensio/bayesclumpy2
|
148c9f7d5aa07516729fb7f417a48148cc240318
|
[
"MIT"
] | 1
|
2022-01-13T14:52:10.000Z
|
2022-01-13T14:52:10.000Z
|
bayesclumpy/__init__.py
|
aasensio/bayesclumpy2
|
148c9f7d5aa07516729fb7f417a48148cc240318
|
[
"MIT"
] | null | null | null |
bayesclumpy/__init__.py
|
aasensio/bayesclumpy2
|
148c9f7d5aa07516729fb7f417a48148cc240318
|
[
"MIT"
] | null | null | null |
from .bayesclumpy import *
#from .analysis import *
| 17.333333
| 26
| 0.75
| 6
| 52
| 6.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 2
| 27
| 26
| 0.886364
| 0.442308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
73d7520a1413ef6d8191537eec31a1e8e2f85be0
| 95
|
py
|
Python
|
api/app/error/__init__.py
|
GlenioSP/Gutenberg-Search
|
4213b594cad74f8d06f3288c2244e1bc93300bbb
|
[
"MIT"
] | 12
|
2019-10-24T12:49:16.000Z
|
2021-08-20T21:37:11.000Z
|
api/app/error/__init__.py
|
GlenioSP/Gutenberg-Search
|
4213b594cad74f8d06f3288c2244e1bc93300bbb
|
[
"MIT"
] | 2
|
2020-03-27T11:03:11.000Z
|
2020-06-22T18:25:04.000Z
|
api/app/error/__init__.py
|
GlenioSP/Gutenberg-Search
|
4213b594cad74f8d06f3288c2244e1bc93300bbb
|
[
"MIT"
] | 5
|
2020-01-08T06:12:24.000Z
|
2021-08-20T21:37:13.000Z
|
from flask import Blueprint
# Flask blueprint named 'error'; the handler functions are attached to it
# by the `app.error.handlers` import below.
bp = Blueprint('error', __name__)
from app.error import handlers
| 15.833333
| 33
| 0.778947
| 13
| 95
| 5.384615
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147368
| 95
| 5
| 34
| 19
| 0.864198
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
73e9b2320ae93db81a6ad2673d9e521e765f519d
| 5,939
|
py
|
Python
|
wiggin_mito/actions/heteropolymer.py
|
golobor/wiggin_mito
|
51103894a4c7eac07cbf0cd6e891856b6e6bced7
|
[
"MIT"
] | null | null | null |
wiggin_mito/actions/heteropolymer.py
|
golobor/wiggin_mito
|
51103894a4c7eac07cbf0cd6e891856b6e6bced7
|
[
"MIT"
] | null | null | null |
wiggin_mito/actions/heteropolymer.py
|
golobor/wiggin_mito
|
51103894a4c7eac07cbf0cd6e891856b6e6bced7
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass
import logging
import numbers
from typing import Union, Tuple, Sequence, Any, Optional # noqa: F401
import numpy as np
from .. import forces
from wiggin.core import SimAction
import polychrom
import polychrom.forces
import polychrom.forcekits
logging.basicConfig(level=logging.INFO)
@dataclass
class RandomBlockParticleTypes(SimAction):
    """Assign each of the N particles a type by tiling the chain with
    blocks of geometrically distributed random lengths, cycling through
    the available types in round-robin order."""

    # Mean block length for each type; the number of entries defines the
    # number of distinct particle types.
    avg_block_lens: Sequence[int] = (2, 2)
    _reads_shared = ['N']
    _writes_shared = ['particle_types']
    def configure(self):
        out_shared = {}
        # This solution is slow-ish (1 sec for 1e6 particles), but simple
        N = self._shared['N']
        avg_block_lens = self.avg_block_lens
        n_types = len(avg_block_lens)
        # -1 marks "unassigned"; the loop below overwrites every slot
        # because it only exits once new_p has passed N.
        particle_types = np.full(N, -1)
        p, new_p, t = 0, 0, 0
        while new_p <= N:
            # Draw the next block length from a geometric distribution
            # whose mean is the requested length for the current type t.
            new_p = p + np.random.geometric(1 / avg_block_lens[t])
            # Clip the final block at the end of the chain.
            particle_types[p : min(new_p, N)] = t
            t = (t + 1) % n_types
            p = new_p
        out_shared["particle_types"] = particle_types
        return out_shared
@dataclass
class ChainsSelectiveRepAttr(SimAction):
    """Set up polymer chains with harmonic bonds, optional angle
    stiffness, and a homotypic (selective) repulsive/attractive
    nonbonded force."""

    # Either a sequence of (start, end, is_ring) triples or a flat
    # sequence of chain lengths (converted in configure()).
    chains: Any = ((0, None, False),)
    bond_length: float = 1.0
    wiggle_dist: float = 0.25
    stiffness_k: Optional[float] = None
    repulsion_e: Optional[float] = 1.5 # TODO: implement np.in
    attraction_e: Optional[float] = None
    attraction_r: Optional[float] = None
    selective_attraction_e: Optional[float] = None
    particle_types: Any = None
    except_bonds: bool = False
    _writes_shared = ['chains']
    def configure(self):
        out_shared = {}
        # Case 1: chains is already a sequence of (start, end, is_ring)
        # triples — publish it unchanged.
        if hasattr(self.chains, "__iter__") and hasattr(
            self.chains[0], "__iter__"
        ):
            out_shared["chains"] = self.chains
        # Case 2: chains is a flat sequence of lengths — convert it to
        # consecutive (start, end, False) spans via cumulative sums.
        elif hasattr(self.chains, "__iter__") and isinstance(
            self.chains[0], numbers.Number
        ):
            edges = np.r_[0, np.cumsum(self.chains)]
            chains = [(st, end, False) for st, end in zip(edges[:-1], edges[1:])]
            self.chains = chains
            out_shared["chains"] = chains
        return out_shared
    def run_init(self, sim):
        # do not use self.params!
        # only use parameters from self and self._shared
        nonbonded_force_func = forces.homotypic_quartic_repulsive_attractive
        nonbonded_force_kwargs = dict(
            repulsionEnergy=self.repulsion_e,
            repulsionRadius=1.0,
            attractionEnergy=self.attraction_e,
            attractionRadius=self.attraction_r,
            particleTypes=self.particle_types,
            selectiveAttractionEnergy=self.selective_attraction_e,
        )
        sim.add_force(
            polychrom.forcekits.polymer_chains(
                sim,
                chains=self._shared["chains"],
                bond_force_func=polychrom.forces.harmonic_bonds,
                bond_force_kwargs={
                    "bondLength": self.bond_length,
                    "bondWiggleDistance": self.wiggle_dist,
                },
                angle_force_func=(
                    None if self.stiffness_k is None else polychrom.forces.angle_force
                ),
                angle_force_kwargs={"k": self.stiffness_k},
                nonbonded_force_func=nonbonded_force_func,
                nonbonded_force_kwargs=nonbonded_force_kwargs,
                except_bonds=self.except_bonds,
            )
        )
@dataclass
class ChainsHeteropolymerRepAttr(SimAction):
    """Set up polymer chains with harmonic bonds, optional angle
    stiffness, and a heteropolymer (per-type-pair) repulsive/attractive
    nonbonded force.

    Particle types are taken from ``self.particle_types`` when given,
    otherwise from the shared ``'particle_types'`` entry.
    """

    # Either a sequence of (start, end, is_ring) triples or a flat
    # sequence of chain lengths (converted in configure()).
    chains: Any = ((0, None, False),)
    bond_length: float = 1.0
    wiggle_dist: float = 0.25
    stiffness_k: Optional[float] = None
    repulsion_e: Optional[float] = 1.5  # TODO: implement np.in
    attraction_e: Optional[float] = None
    attraction_r: Optional[float] = None
    particle_types: Any = None
    except_bonds: bool = False
    _reads_shared = ['particle_types']
    _writes_shared = ['chains']
    def configure(self):
        out_shared = {}
        # Case 1: chains is already a sequence of (start, end, is_ring)
        # triples — publish it unchanged.
        if hasattr(self.chains, "__iter__") and hasattr(
            self.chains[0], "__iter__"
        ):
            out_shared["chains"] = self.chains
        # Case 2: chains is a flat sequence of lengths — convert it to
        # consecutive (start, end, False) spans via cumulative sums.
        elif hasattr(self.chains, "__iter__") and isinstance(
            self.chains[0], numbers.Number
        ):
            edges = np.r_[0, np.cumsum(self.chains)]
            chains = np.array(
                [(st, end, False) for st, end in zip(edges[:-1], edges[1:])],
                # BUGFIX: the `np.object` alias was deprecated in NumPy
                # 1.20 and removed in 1.24; the builtin `object` is the
                # supported equivalent.
                dtype=object,
            )
            self.chains = chains
            out_shared["chains"] = chains
        return out_shared
    def run_init(self, sim):
        # do not use self.params!
        # only use parameters from self and self._shared
        nonbonded_force_func = forces.heteropolymer_quartic_repulsive_attractive
        nonbonded_force_kwargs = dict(
            repulsionEnergy=self.repulsion_e,
            repulsionRadius=1.0,
            attractionEnergies=self.attraction_e,
            attractionRadius=self.attraction_r,
            particleTypes=(
                self._shared["particle_types"]
                if self.particle_types is None
                else self.particle_types
            ),
        )
        sim.add_force(
            polychrom.forcekits.polymer_chains(
                sim,
                chains=self._shared["chains"],
                # NOTE(review): the sibling class ChainsSelectiveRepAttr
                # uses polychrom.forces.harmonic_bonds here — confirm the
                # local forces.harmonic_bonds is intentional.
                bond_force_func=forces.harmonic_bonds,
                bond_force_kwargs={
                    "bondLength": self.bond_length,
                    "bondWiggleDistance": self.wiggle_dist,
                },
                angle_force_func=(
                    None if self.stiffness_k is None else polychrom.forces.angle_force
                ),
                angle_force_kwargs={"k": self.stiffness_k},
                nonbonded_force_func=nonbonded_force_func,
                nonbonded_force_kwargs=nonbonded_force_kwargs,
                except_bonds=self.except_bonds,
            )
        )
| 31.257895
| 86
| 0.59185
| 654
| 5,939
| 5.108563
| 0.214067
| 0.041904
| 0.035618
| 0.025142
| 0.710865
| 0.70009
| 0.70009
| 0.70009
| 0.70009
| 0.662377
| 0
| 0.01084
| 0.316552
| 5,939
| 189
| 87
| 31.42328
| 0.812269
| 0.046809
| 0
| 0.623288
| 0
| 0
| 0.037522
| 0
| 0
| 0
| 0
| 0.005291
| 0
| 1
| 0.034247
| false
| 0
| 0.068493
| 0
| 0.315068
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fb7d4ebc7530620a4a39ba0864b25fdccf5221f6
| 1,525
|
py
|
Python
|
h5py/tests/old/test_file_image.py
|
payno/h5py
|
446548ddc160874fabca670b88bc47bcc5ead4aa
|
[
"BSD-3-Clause"
] | null | null | null |
h5py/tests/old/test_file_image.py
|
payno/h5py
|
446548ddc160874fabca670b88bc47bcc5ead4aa
|
[
"BSD-3-Clause"
] | null | null | null |
h5py/tests/old/test_file_image.py
|
payno/h5py
|
446548ddc160874fabca670b88bc47bcc5ead4aa
|
[
"BSD-3-Clause"
] | null | null | null |
import h5py
from h5py import h5f, h5p
from ..common import ut, TestCase
@ut.skipUnless(h5py.version.hdf5_version_tuple >= (1, 8, 9), 'file image operations require HDF5 >= 1.8.9')
class TestFileImage(TestCase):
    """Tests for opening HDF5 files from an in-memory file image."""

    # Base64 of a zlib-compressed HDF5 file image containing an object
    # named 'test'.  Hoisted here because the same literal was duplicated
    # verbatim in both test methods.
    COMPRESSED_IMAGE = 'eJzr9HBx4+WS4mIAAQ4OBhYGAQZk8B8KKjhQ+TD5BCjNCKU7oPQKJpg4I1hOAiouCDUfXV1IkKsrSPV/NACzx4AFQnMwjIKRCDxcHQNAdASUD0ulJ5hQ1ZWkFpeAaFh69KDQXkYGNohZjDA+JCUzMkIEmKHqELQAWKkAByytOoBJViAPJM7ExATWyAE0B8RgZkyAJmlYDoEAIahukJoNU6+HMTA0UOgT6oBgP38XUI6G5UMFZrzKR8EoGAUjGMDKYVgxDSsuAHcfMK8='

    def _image(self):
        """Return the decoded, decompressed HDF5 file image as bytes."""
        from binascii import a2b_base64
        from zlib import decompress
        return decompress(a2b_base64(self.COMPRESSED_IMAGE))

    def test_load_from_image(self):
        """Open the image through a core-driver FAPL with set_file_image."""
        image = self._image()
        fapl = h5p.create(h5py.h5p.FILE_ACCESS)
        fapl.set_fapl_core()
        fapl.set_file_image(image)
        fid = h5f.open(self.mktemp().encode(), h5py.h5f.ACC_RDONLY, fapl=fapl)
        f = h5py.File(fid)
        self.assertTrue('test' in f)

    def test_open_from_image(self):
        """Open the image directly with h5f.open_file_image."""
        image = self._image()
        fid = h5f.open_file_image(image)
        f = h5py.File(fid)
        self.assertTrue('test' in f)
| 41.216216
| 301
| 0.771803
| 143
| 1,525
| 8.062937
| 0.356643
| 0.031223
| 0.005204
| 0.029488
| 0.72333
| 0.72333
| 0.72333
| 0.72333
| 0.72333
| 0.666088
| 0
| 0.07154
| 0.156721
| 1,525
| 36
| 302
| 42.361111
| 0.825039
| 0
| 0
| 0.5
| 0
| 0
| 0.390164
| 0.356721
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.083333
| false
| 0
| 0.291667
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fb9ca9b59afab3b43a33075b95520d3ecd7fdfab
| 66
|
py
|
Python
|
v3/Libraries/builtin/int/hex to decimal.py
|
TheShellLand/python
|
a35e9b32bec3a3ff03d6f0f4c2c2cc891180e516
|
[
"MIT"
] | null | null | null |
v3/Libraries/builtin/int/hex to decimal.py
|
TheShellLand/python
|
a35e9b32bec3a3ff03d6f0f4c2c2cc891180e516
|
[
"MIT"
] | 1
|
2021-06-01T22:50:19.000Z
|
2021-06-01T22:50:19.000Z
|
v3/Libraries/builtin/int/hex to decimal.py
|
TheShellLand/python
|
a35e9b32bec3a3ff03d6f0f4c2c2cc891180e516
|
[
"MIT"
] | null | null | null |
# Hex string -> int: base 0 infers the base from the literal's
# prefix ('0x' -> 16, '0o' -> 8, '0b' -> 2, otherwise decimal).
int('0x7e0', 0)
# 2016
# Explicit base 16: the '0x' prefix is optional.
int('7e0', 16)
# 2016
# int -> hex string: hex() renders a '0x'-prefixed lowercase literal.
hex(2016)
# '0x7e0'
| 8.25
| 15
| 0.560606
| 11
| 66
| 3.363636
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.425926
| 0.181818
| 66
| 8
| 16
| 8.25
| 0.259259
| 0.257576
| 0
| 0
| 0
| 0
| 0.173913
| 0
| 0
| 0
| 0.108696
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
83a1ce9fd3151da749a9177b4a599981006f0854
| 524
|
py
|
Python
|
videos-master/_2018/eop/reusable_imports.py
|
samsmusa/My-manim-master
|
a79266ea21fbb7e84d0133030146549f381c31cb
|
[
"MIT"
] | 5
|
2021-03-18T02:28:07.000Z
|
2021-04-10T03:40:24.000Z
|
videos-master/_2018/eop/reusable_imports.py
|
samsmusa/My-manim-master
|
a79266ea21fbb7e84d0133030146549f381c31cb
|
[
"MIT"
] | null | null | null |
videos-master/_2018/eop/reusable_imports.py
|
samsmusa/My-manim-master
|
a79266ea21fbb7e84d0133030146549f381c31cb
|
[
"MIT"
] | 1
|
2022-02-16T03:22:47.000Z
|
2022-02-16T03:22:47.000Z
|
from _2018.eop.reusables.binary_option import *
from _2018.eop.reusables.brick_row import *
from _2018.eop.reusables.coin_flip_tree import *
from _2018.eop.reusables.coin_flipping_pi_creature import *
from _2018.eop.reusables.coin_stacks import *
from _2018.eop.reusables.dice import *
from _2018.eop.reusables.eop_constants import *
from _2018.eop.reusables.eop_helpers import *
from _2018.eop.reusables.histograms import *
from _2018.eop.reusables.sick_pi_creature import *
from _2018.eop.reusables.upright_coins import *
| 43.666667
| 59
| 0.832061
| 79
| 524
| 5.21519
| 0.291139
| 0.213592
| 0.293689
| 0.533981
| 0.723301
| 0.470874
| 0.174757
| 0
| 0
| 0
| 0
| 0.091667
| 0.083969
| 524
| 11
| 60
| 47.636364
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
83c2f8f3745123753aa02052dc6a42484429c05c
| 774
|
py
|
Python
|
healtheintent_api/errors.py
|
torchbox/healtheintent-api-python
|
6e0d22b907f1c18bd24ed4b92d7bbf67539d4567
|
[
"MIT"
] | 1
|
2020-09-18T14:31:57.000Z
|
2020-09-18T14:31:57.000Z
|
healtheintent_api/errors.py
|
torchbox/healtheintent-api-python
|
6e0d22b907f1c18bd24ed4b92d7bbf67539d4567
|
[
"MIT"
] | null | null | null |
healtheintent_api/errors.py
|
torchbox/healtheintent-api-python
|
6e0d22b907f1c18bd24ed4b92d7bbf67539d4567
|
[
"MIT"
] | null | null | null |
from requests import HTTPError
class HealthEIntentHttpError(HTTPError):
    """Base error for failures while communicating with the Health E Intent API."""
class BadRequestError(HealthEIntentHttpError):
    """Raised when the API responds with 400 (Bad Request)."""
class UnauthorizedError(HealthEIntentHttpError):
    """Raised when the API responds with 401 (Unauthorized)."""
class NotPermittedError(HealthEIntentHttpError):
    """Raised when the API responds with 403 (Forbidden)."""
class ResourceNotFoundError(HealthEIntentHttpError):
    """Raised when the API responds with 404 (Resource not found)."""
class ResourceConflictError(HealthEIntentHttpError):
    """Raised when the API answers with HTTP 409 (Conflict)."""
| 24.1875
| 71
| 0.741602
| 82
| 774
| 7
| 0.47561
| 0.078397
| 0.243902
| 0.313589
| 0.3223
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023511
| 0.175711
| 774
| 31
| 72
| 24.967742
| 0.876176
| 0.418605
| 0
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.461538
| 0.076923
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
83ce2f43b22275097db230f834cae5a7a7ac9744
| 194
|
py
|
Python
|
src/new_code_wip/led_control.py
|
madsmcmillan/banner
|
3af13f9f98f82a76a35fa0710b2d91e8f5700f68
|
[
"MIT"
] | 1
|
2018-02-16T00:07:53.000Z
|
2018-02-16T00:07:53.000Z
|
src/new_code_wip/led_control.py
|
madsmcmillan/banner
|
3af13f9f98f82a76a35fa0710b2d91e8f5700f68
|
[
"MIT"
] | null | null | null |
src/new_code_wip/led_control.py
|
madsmcmillan/banner
|
3af13f9f98f82a76a35fa0710b2d91e8f5700f68
|
[
"MIT"
] | 1
|
2019-08-29T14:28:56.000Z
|
2019-08-29T14:28:56.000Z
|
"""
############################################################################
## High Altitude Balloon Club
############################################################################
"""
| 24.25
| 76
| 0.118557
| 4
| 194
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056701
| 194
| 7
| 77
| 27.714286
| 0.125683
| 0.953608
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f704e9eac8de03f01fa77d9579930caec71292b1
| 22
|
py
|
Python
|
luvio/externals/domain_api/api.py
|
nguyenhailong253/luvio-server
|
8e75bea4171fc2367cc6d7ebd5a19382932840d5
|
[
"MIT"
] | null | null | null |
luvio/externals/domain_api/api.py
|
nguyenhailong253/luvio-server
|
8e75bea4171fc2367cc6d7ebd5a19382932840d5
|
[
"MIT"
] | null | null | null |
luvio/externals/domain_api/api.py
|
nguyenhailong253/luvio-server
|
8e75bea4171fc2367cc6d7ebd5a19382932840d5
|
[
"MIT"
] | null | null | null |
# TODO: implement the Domain API client calls here.
| 22
| 22
| 0.772727
| 4
| 22
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.944444
| 0.909091
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f71146e12c601123c8c8e79c664e15a620ce0608
| 184
|
py
|
Python
|
GPGO/__init__.py
|
FNTwin/Bayesian-Optimization
|
2f89699648601d4499dcab285a1d7376f0e1ef4b
|
[
"MIT"
] | 3
|
2020-06-07T19:16:40.000Z
|
2020-07-18T21:56:13.000Z
|
GPGO/__init__.py
|
FNTwin/Bayesian-Optimization
|
2f89699648601d4499dcab285a1d7376f0e1ef4b
|
[
"MIT"
] | null | null | null |
GPGO/__init__.py
|
FNTwin/Bayesian-Optimization
|
2f89699648601d4499dcab285a1d7376f0e1ef4b
|
[
"MIT"
] | 2
|
2021-01-03T19:09:42.000Z
|
2021-01-03T19:09:42.000Z
|
#from .Opt import BayesianOptimization
from .GaussianProcess import GP
from .GaussianProcess.Kernel import RBF
from .Opt import BayesianOptimization
from .Acquisition import Acquistion
| 36.8
| 39
| 0.858696
| 21
| 184
| 7.52381
| 0.47619
| 0.088608
| 0.164557
| 0.417722
| 0.468354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103261
| 184
| 5
| 40
| 36.8
| 0.957576
| 0.201087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
f7203b8e88485ca582d2256b589f018535db2c31
| 223
|
py
|
Python
|
terminusdb_client/__init__.py
|
LogicalDash/terminusdb-client-python
|
7f13f77e60f891b1e6bd214ebf73ff7f75fcaff8
|
[
"Apache-2.0"
] | 43
|
2020-06-12T23:44:17.000Z
|
2022-03-12T15:18:55.000Z
|
terminusdb_client/__init__.py
|
LogicalDash/terminusdb-client-python
|
7f13f77e60f891b1e6bd214ebf73ff7f75fcaff8
|
[
"Apache-2.0"
] | 151
|
2020-06-12T20:23:05.000Z
|
2022-03-29T20:38:35.000Z
|
terminusdb_client/__init__.py
|
LogicalDash/terminusdb-client-python
|
7f13f77e60f891b1e6bd214ebf73ff7f75fcaff8
|
[
"Apache-2.0"
] | 46
|
2020-06-16T20:51:21.000Z
|
2022-03-17T18:11:46.000Z
|
from .woqlclient import WOQLClient # noqa
from .woqldataframe import woqlDataframe as WOQLDataFrame # noqa
from .woqlquery import WOQLQuery # noqa
from .woqlschema import * # noqa
from .woqlview import WOQLView # noqa
| 37.166667
| 65
| 0.784753
| 26
| 223
| 6.730769
| 0.346154
| 0.182857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165919
| 223
| 5
| 66
| 44.6
| 0.94086
| 0.107623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f731b9052df0f2dc9c9b090455306ea466ce939c
| 253
|
py
|
Python
|
cmdline_provenance/__init__.py
|
znicholls/cmdline_provenance
|
c9df55a9e4b0e7435499993c39eb9ff7e360b1c6
|
[
"MIT"
] | null | null | null |
cmdline_provenance/__init__.py
|
znicholls/cmdline_provenance
|
c9df55a9e4b0e7435499993c39eb9ff7e360b1c6
|
[
"MIT"
] | null | null | null |
cmdline_provenance/__init__.py
|
znicholls/cmdline_provenance
|
c9df55a9e4b0e7435499993c39eb9ff7e360b1c6
|
[
"MIT"
] | null | null | null |
"""Utilities for capturing the history of commands used to produce a given output"""
from .cmdline_provenance import new_log
from .cmdline_provenance import read_log
from .cmdline_provenance import write_log
__all__ = [new_log, read_log, write_log]
| 25.3
| 84
| 0.810277
| 38
| 253
| 5.052632
| 0.578947
| 0.171875
| 0.328125
| 0.421875
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134387
| 253
| 9
| 85
| 28.111111
| 0.876712
| 0.3083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f732baf10d5767e849b93e2c2732e51ee1fccc79
| 307
|
py
|
Python
|
src/employer/admin.py
|
vladimirtkach/yesjob
|
83800f4d29bf2dab30b14fc219d3150e3bc51e15
|
[
"MIT"
] | null | null | null |
src/employer/admin.py
|
vladimirtkach/yesjob
|
83800f4d29bf2dab30b14fc219d3150e3bc51e15
|
[
"MIT"
] | 18
|
2020-02-12T00:41:40.000Z
|
2022-02-10T12:00:03.000Z
|
src/employer/admin.py
|
vladimirtkach/yesjob
|
83800f4d29bf2dab30b14fc219d3150e3bc51e15
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Employer, ContactPerson, Language, Expenses, Vacancy
from django.contrib.auth.models import Permission
# Expose the raw auth Permission objects in the Django admin for inspection.
admin.site.register(Permission)
# One decorator call registers all five employer-related models with the
# same (default) admin options.
@admin.register(Employer, ContactPerson, Language, Expenses, Vacancy)
class AuthorAdmin(admin.ModelAdmin):
    # Plain ModelAdmin — no custom list display, filters or forms.
    pass
| 27.909091
| 72
| 0.814332
| 36
| 307
| 6.944444
| 0.527778
| 0.08
| 0.136
| 0.296
| 0.352
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104235
| 307
| 10
| 73
| 30.7
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 0.428571
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
f7436fe7cde07ea874b0eae2dc638ad03d094681
| 99
|
py
|
Python
|
fpage/votes/__init__.py
|
nikolak/fpage
|
064ae766156559a65186370cc458d799f0d441d3
|
[
"Apache-2.0"
] | 3
|
2019-08-15T12:26:30.000Z
|
2020-12-28T05:20:11.000Z
|
fpage/votes/__init__.py
|
nikolak/fpage
|
064ae766156559a65186370cc458d799f0d441d3
|
[
"Apache-2.0"
] | null | null | null |
fpage/votes/__init__.py
|
nikolak/fpage
|
064ae766156559a65186370cc458d799f0d441d3
|
[
"Apache-2.0"
] | 2
|
2020-04-15T19:06:30.000Z
|
2020-12-28T05:34:09.000Z
|
# encoding: utf-8
'''Votes module handles comment and thread votes posting/models'''
import views
| 19.8
| 66
| 0.757576
| 14
| 99
| 5.357143
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0.141414
| 99
| 5
| 67
| 19.8
| 0.870588
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f74ea13a3775ce2d8e1a5f420efda4ed93074266
| 1,092
|
py
|
Python
|
bip_utils/addr/__init__.py
|
git-sgmoore/bip_utils
|
05772821132bf538a44f22a1f3c5ba1267ed54b2
|
[
"MIT"
] | null | null | null |
bip_utils/addr/__init__.py
|
git-sgmoore/bip_utils
|
05772821132bf538a44f22a1f3c5ba1267ed54b2
|
[
"MIT"
] | null | null | null |
bip_utils/addr/__init__.py
|
git-sgmoore/bip_utils
|
05772821132bf538a44f22a1f3c5ba1267ed54b2
|
[
"MIT"
] | null | null | null |
from bip_utils.addr.iaddr_encoder import IAddrEncoder
from bip_utils.addr.algo_addr import AlgoAddr
from bip_utils.addr.atom_addr import AtomAddr
from bip_utils.addr.avax_addr import AvaxPChainAddr, AvaxXChainAddr
from bip_utils.addr.egld_addr import EgldAddr
from bip_utils.addr.eth_addr import EthAddr
from bip_utils.addr.fil_addr import FilAddr
from bip_utils.addr.okex_addr import OkexAddr
from bip_utils.addr.nano_addr import NanoAddr
from bip_utils.addr.neo_addr import NeoAddr
from bip_utils.addr.one_addr import OneAddr
from bip_utils.addr.P2PKH_addr import P2PKHAddr, BchP2PKHAddr
from bip_utils.addr.P2SH_addr import P2SHAddr, BchP2SHAddr
from bip_utils.addr.P2WPKH_addr import P2WPKHAddr
from bip_utils.addr.sol_addr import SolAddr
from bip_utils.addr.substrate_addr import SubstrateEd25519Addr, SubstrateSr25519Addr
from bip_utils.addr.trx_addr import TrxAddr
from bip_utils.addr.xlm_addr import XlmAddr
from bip_utils.addr.xmr_addr import XmrAddr
from bip_utils.addr.xrp_addr import XrpAddr
from bip_utils.addr.xtz_addr import XtzAddr
from bip_utils.addr.zil_addr import ZilAddr
| 47.478261
| 84
| 0.871795
| 180
| 1,092
| 5.044444
| 0.3
| 0.169604
| 0.290749
| 0.387665
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018
| 0.084249
| 1,092
| 22
| 85
| 49.636364
| 0.89
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f74eb2a0e6a9da9a006170e2b8cffe1738b249ac
| 84
|
py
|
Python
|
guillotina_graphql/tests/conftest.py
|
masipcat/guillotina_graphql
|
509d5d5a8724188c9a14eeb09b0a08f420d9e98e
|
[
"MIT"
] | 1
|
2020-09-27T16:36:29.000Z
|
2020-09-27T16:36:29.000Z
|
guillotina_graphql/tests/conftest.py
|
masipcat/guillotina_graphql
|
509d5d5a8724188c9a14eeb09b0a08f420d9e98e
|
[
"MIT"
] | null | null | null |
guillotina_graphql/tests/conftest.py
|
masipcat/guillotina_graphql
|
509d5d5a8724188c9a14eeb09b0a08f420d9e98e
|
[
"MIT"
] | null | null | null |
# Load the fixture plugins from guillotina core and from this package so
# pytest makes their fixtures available to every test in the suite.
pytest_plugins = ["guillotina.tests.fixtures", "guillotina_graphql.tests.fixtures"]
| 42
| 83
| 0.809524
| 9
| 84
| 7.333333
| 0.666667
| 0.393939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 84
| 1
| 84
| 84
| 0.825
| 0
| 0
| 0
| 0
| 0
| 0.690476
| 0.690476
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f7638a7def48225239054a35720fc6256195576d
| 35
|
py
|
Python
|
shopping_mall/celery_tasks/config.py
|
lzy00001/SHOP_CENTER
|
1e26b9694afc89d86f2f3db9c0b0ff1f98ab1369
|
[
"MIT"
] | 1
|
2021-07-28T13:28:39.000Z
|
2021-07-28T13:28:39.000Z
|
meiduo_mall/celery_tasks/config.py
|
Vent-Any/meiduo_first_commit
|
75c5da7a29733d8135342f4cd357c67c10a40ae1
|
[
"MIT"
] | null | null | null |
meiduo_mall/celery_tasks/config.py
|
Vent-Any/meiduo_first_commit
|
75c5da7a29733d8135342f4cd357c67c10a40ae1
|
[
"MIT"
] | null | null | null |
# Celery broker: Redis database 15 on localhost.
# NOTE(review): no password and a hard-coded host — presumably dev-only; confirm
# this is overridden for production deployments.
broker_url = 'redis://127.0.0.1/15'
| 35
| 35
| 0.657143
| 8
| 35
| 2.75
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.242424
| 0.057143
| 35
| 1
| 35
| 35
| 0.424242
| 0
| 0
| 0
| 0
| 0
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f7a002f8d965cd1445e5de1c58bef406f3d784b0
| 119
|
py
|
Python
|
actors/components/component.py
|
Catsuko/Westward
|
3c04df668f7e04ca45e622017ffa9dfe6d3c242c
|
[
"MIT"
] | 3
|
2019-12-22T22:44:43.000Z
|
2020-02-11T11:14:10.000Z
|
actors/components/component.py
|
Catsuko/Westward
|
3c04df668f7e04ca45e622017ffa9dfe6d3c242c
|
[
"MIT"
] | null | null | null |
actors/components/component.py
|
Catsuko/Westward
|
3c04df668f7e04ca45e622017ffa9dfe6d3c242c
|
[
"MIT"
] | null | null | null |
class Component:
    """Base building block for actors.

    The base component is a no-op: updating it yields the component itself
    unchanged, and printing it leaves the rendering medium untouched.
    Subclasses override these hooks to add behaviour.
    """

    def update(self):
        """Advance one tick; the base component never changes state."""
        return self

    def print_to(self, x, y, media):
        """Render at position (x, y); the base component draws nothing."""
        return media
| 14.875
| 36
| 0.588235
| 16
| 119
| 4.3125
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.327731
| 119
| 7
| 37
| 17
| 0.8625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 1
| 0.2
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e3caad574b90c7faa61a65f171b13ff5c6c8156d
| 86
|
py
|
Python
|
9781838981686_Code/CODES/todo_list/admin.py
|
josemarfp/django
|
21041201ccdf3cdf482d6d34d3dc02477c89621d
|
[
"MIT"
] | 7
|
2019-09-30T00:58:36.000Z
|
2021-07-25T05:24:59.000Z
|
9781838981686_Code/CODES/todo_list/admin.py
|
josemarfp/django
|
21041201ccdf3cdf482d6d34d3dc02477c89621d
|
[
"MIT"
] | null | null | null |
9781838981686_Code/CODES/todo_list/admin.py
|
josemarfp/django
|
21041201ccdf3cdf482d6d34d3dc02477c89621d
|
[
"MIT"
] | 5
|
2019-09-30T00:58:40.000Z
|
2021-03-11T11:20:42.000Z
|
from django.contrib import admin
from .models import List
# Make the List model manageable through the Django admin site.
admin.site.register(List)
| 14.333333
| 32
| 0.802326
| 13
| 86
| 5.307692
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127907
| 86
| 5
| 33
| 17.2
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e3d1309749b1c9518e1b12e4d6865585a5f0a9ca
| 65
|
py
|
Python
|
fabfile.py
|
russellwstanley/sg50hackathon-healthband
|
a9a5a0e453137defa7ca44ac3f206f125aadae28
|
[
"Apache-2.0"
] | null | null | null |
fabfile.py
|
russellwstanley/sg50hackathon-healthband
|
a9a5a0e453137defa7ca44ac3f206f125aadae28
|
[
"Apache-2.0"
] | null | null | null |
fabfile.py
|
russellwstanley/sg50hackathon-healthband
|
a9a5a0e453137defa7ca44ac3f206f125aadae28
|
[
"Apache-2.0"
] | null | null | null |
from fabric.api import *
def all_tests():
    """Run the project's sbt test suite on the local machine.

    Uses Fabric's ``local`` (imported via ``from fabric.api import *``) to
    shell out; a non-zero exit from ``sbt test`` aborts the Fabric run.
    """
    local('sbt test')
| 13
| 24
| 0.661538
| 10
| 65
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 65
| 4
| 25
| 16.25
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e3f6d5aaa4ece20b37f10b59a6a9182b7625b548
| 5,126
|
py
|
Python
|
test_fuzzywuzzymit_hypothesis.py
|
graingert/fuzzywuzzymit
|
cb85af3ccbcd71ce93217c2846550bc87f7d3a74
|
[
"MIT"
] | null | null | null |
test_fuzzywuzzymit_hypothesis.py
|
graingert/fuzzywuzzymit
|
cb85af3ccbcd71ce93217c2846550bc87f7d3a74
|
[
"MIT"
] | null | null | null |
test_fuzzywuzzymit_hypothesis.py
|
graingert/fuzzywuzzymit
|
cb85af3ccbcd71ce93217c2846550bc87f7d3a74
|
[
"MIT"
] | null | null | null |
from itertools import product
from functools import partial
from string import ascii_letters, digits, punctuation
from hypothesis import given, assume, settings
import hypothesis.strategies as st
import pytest
from fuzzywuzzymit import fuzz, process, utils
HYPOTHESIS_ALPHABET = ascii_letters + digits + punctuation
def scorers_processors():
    """
    Build every (scorer, processor) pair exercised by the test suite.

    :return: [(scorer, processor), ...]
    """
    identity = lambda x: x
    process_ascii = partial(utils.full_process, force_ascii=True)
    process_unicode = partial(utils.full_process, force_ascii=False)

    # Cross the two plain ratio scorers with all three processors
    # (same ordering as itertools.product: scorer varies slowest).
    pairs = [
        (scorer, processor)
        for scorer in (fuzz.ratio, fuzz.partial_ratio)
        for processor in (identity, process_unicode, process_ascii)
    ]
    # The remaining scorers each have one canonical processor.
    pairs += [
        (fuzz.WRatio, process_ascii),
        (fuzz.QRatio, process_ascii),
        (fuzz.UWRatio, process_unicode),
        (fuzz.UQRatio, process_unicode),
        (fuzz.token_set_ratio, process_ascii),
        (fuzz.token_sort_ratio, process_ascii),
        (fuzz.partial_token_set_ratio, process_ascii),
        (fuzz.partial_token_sort_ratio, process_ascii),
    ]
    return pairs
def full_scorers_processors():
    """
    Build the (scorer, processor) pairs for scorers that use the full string only.

    :return: [(scorer, processor), ...]
    """
    identity = lambda x: x
    process_ascii = partial(utils.full_process, force_ascii=True)
    process_unicode = partial(utils.full_process, force_ascii=False)

    # fuzz.ratio is the only full-string base scorer; pair it with all
    # three processors, then append the weighted/quick variants.
    pairs = [
        (fuzz.ratio, processor)
        for processor in (identity, process_unicode, process_ascii)
    ]
    pairs += [
        (fuzz.WRatio, process_ascii),
        (fuzz.QRatio, process_ascii),
        (fuzz.UWRatio, process_unicode),
        (fuzz.UQRatio, process_unicode),
    ]
    return pairs
@pytest.mark.parametrize('scorer,processor',
                         scorers_processors())
@given(data=st.data())
@settings(max_examples=20, deadline=5000)
def test_identical_strings_extracted(scorer, processor, data):
    """
    Test that identical strings will always return a perfect match.

    :param scorer: fuzz scoring function under test
    :param processor: string pre-processor paired with the scorer
    :param data: hypothesis data strategy used to draw the example
    :return: None
    """
    # Draw a list of random strings
    strings = data.draw(
        st.lists(
            st.text(min_size=10, max_size=100, alphabet=HYPOTHESIS_ALPHABET),
            min_size=1,
            max_size=10
        )
    )
    # Draw a random integer for the index in that list
    choiceidx = data.draw(st.integers(min_value=0, max_value=(len(strings) - 1)))
    # Extract our choice from the list
    choice = strings[choiceidx]
    # Check process doesn't make our choice the empty string
    assume(processor(choice) != '')
    # Extract all perfect matches; limit=None removes the cap on the number
    # of results so every perfect match is returned, not just the best few.
    result = process.extractBests(choice,
                                  strings,
                                  scorer=scorer,
                                  processor=processor,
                                  score_cutoff=100,
                                  limit=None)
    # Check we get a result
    assert result != []
    # Check the original is in the list
    assert (choice, 100) in result
@pytest.mark.parametrize('scorer,processor',
                         full_scorers_processors())
@given(data=st.data())
@settings(max_examples=20, deadline=5000)
def test_only_identical_strings_extracted(scorer, processor, data):
    """
    Test that only identical (post processing) strings score 100 on the test.

    If two strings are not identical then using full comparison methods they should
    not be a perfect (100) match.

    :param scorer: full-string fuzz scoring function under test
    :param processor: string pre-processor paired with the scorer
    :param data: hypothesis data strategy used to draw the example
    :return: None
    """
    # Draw a list of random strings
    strings = data.draw(
        st.lists(
            st.text(min_size=10, max_size=100, alphabet=HYPOTHESIS_ALPHABET),
            min_size=1,
            max_size=10)
    )
    # Draw a random integer for the index in that list
    choiceidx = data.draw(st.integers(min_value=0, max_value=(len(strings) - 1)))
    # Extract our choice from the list
    choice = strings[choiceidx]
    # Check process doesn't make our choice the empty string
    assume(processor(choice) != '')
    # Extract all perfect matches; limit=None removes the cap on the number
    # of results so every perfect match is returned.
    result = process.extractBests(choice,
                                  strings,
                                  scorer=scorer,
                                  processor=processor,
                                  score_cutoff=100,
                                  limit=None)
    # Check we get a result
    assert result != []
    # Check THE ONLY result(s) we get are a perfect match for the (processed) original data
    pchoice = processor(choice)
    for r in result:
        assert pchoice == processor(r[0])
| 33.285714
| 102
| 0.62778
| 603
| 5,126
| 5.208955
| 0.215589
| 0.061127
| 0.081503
| 0.11716
| 0.782235
| 0.759312
| 0.737345
| 0.737345
| 0.702961
| 0.702961
| 0
| 0.012952
| 0.277019
| 5,126
| 153
| 103
| 33.503268
| 0.834593
| 0.223371
| 0
| 0.604651
| 0
| 0
| 0.008292
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 1
| 0.046512
| false
| 0
| 0.081395
| 0
| 0.151163
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
540759cc2ec5d3628b6f37d83b3b4d5acb365c5a
| 69
|
py
|
Python
|
pangea/contrib/metasub/models.py
|
LongTailBio/pangea-django
|
630551dded7f9e38f95eda8c36039e0de46961e7
|
[
"MIT"
] | null | null | null |
pangea/contrib/metasub/models.py
|
LongTailBio/pangea-django
|
630551dded7f9e38f95eda8c36039e0de46961e7
|
[
"MIT"
] | 27
|
2020-03-26T02:55:12.000Z
|
2022-03-12T00:55:04.000Z
|
pangea/contrib/metasub/models.py
|
LongTailBio/pangea-django
|
630551dded7f9e38f95eda8c36039e0de46961e7
|
[
"MIT"
] | 1
|
2021-09-14T08:15:54.000Z
|
2021-09-14T08:15:54.000Z
|
from django.db import models
# MetaSUB does not require any models.
| 17.25
| 38
| 0.782609
| 11
| 69
| 4.909091
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 69
| 3
| 39
| 23
| 0.947368
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
541b0427980a143d3f9180266469a4442b23e29c
| 270
|
py
|
Python
|
rebase/validators/__init__.py
|
dciccale/rebase
|
e218bde43a31cb8b269733d262e978e4872f969f
|
[
"Apache-2.0"
] | 15
|
2018-07-06T13:40:27.000Z
|
2021-08-05T21:33:31.000Z
|
rebase/validators/__init__.py
|
dciccale/rebase
|
e218bde43a31cb8b269733d262e978e4872f969f
|
[
"Apache-2.0"
] | 1
|
2021-04-01T10:24:32.000Z
|
2021-04-01T10:24:32.000Z
|
rebase/validators/__init__.py
|
dciccale/rebase
|
e218bde43a31cb8b269733d262e978e4872f969f
|
[
"Apache-2.0"
] | 5
|
2018-07-06T13:40:31.000Z
|
2020-10-28T19:51:03.000Z
|
from .bool_validator import BoolValidator
from .integer_validator import IntegerValidator
from .nested_validator import NestedValidator
from .range_validator import RangeValidator
from .string_validator import StringValidator
from .alnum_validator import AlnumValidator
| 38.571429
| 47
| 0.888889
| 30
| 270
| 7.8
| 0.5
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 270
| 6
| 48
| 45
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
580abb9c485ef9c70248b90370d50df2d09c5938
| 68
|
py
|
Python
|
mypackage/__init__.py
|
liuxk99/python-package-template
|
b657da210c066413ff32bf065de75a3f76e6c730
|
[
"Apache-2.0"
] | null | null | null |
mypackage/__init__.py
|
liuxk99/python-package-template
|
b657da210c066413ff32bf065de75a3f76e6c730
|
[
"Apache-2.0"
] | null | null | null |
mypackage/__init__.py
|
liuxk99/python-package-template
|
b657da210c066413ff32bf065de75a3f76e6c730
|
[
"Apache-2.0"
] | null | null | null |
from .functions import average, power
from .greet import SayHello
| 22.666667
| 38
| 0.794118
| 9
| 68
| 6
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161765
| 68
| 2
| 39
| 34
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
58102e9b56ede7dc74a59d81e9fa3500796c8de0
| 1,060
|
py
|
Python
|
blitz_api/migrations/0015_user_city_and_personnal_restrictions_field.py
|
Jerome-Celle/Blitz-API
|
7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0
|
[
"MIT"
] | 3
|
2019-10-22T00:16:49.000Z
|
2021-07-15T07:44:43.000Z
|
blitz_api/migrations/0015_user_city_and_personnal_restrictions_field.py
|
Jerome-Celle/Blitz-API
|
7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0
|
[
"MIT"
] | 1,183
|
2018-04-19T18:40:30.000Z
|
2022-03-31T21:05:05.000Z
|
blitz_api/migrations/0015_user_city_and_personnal_restrictions_field.py
|
Jerome-Celle/Blitz-API
|
7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0
|
[
"MIT"
] | 12
|
2018-04-17T19:16:42.000Z
|
2022-01-27T00:19:59.000Z
|
# Generated by Django 2.0.8 on 2018-12-16 19:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add 'city' and 'personnal_restrictions' fields to the user model.

    Each field is added twice: once on 'user' and once on 'historicaluser',
    which appears to be an audit/history mirror of the user table —
    NOTE(review): presumably maintained by a history-tracking app; confirm.
    Auto-generated by Django; do not edit the operations by hand.
    """
    dependencies = [
        ('blitz_api', '0014_model_translation'),
    ]
    operations = [
        migrations.AddField(
            model_name='historicaluser',
            name='city',
            field=models.CharField(blank=True, max_length=50, null=True, verbose_name='City'),
        ),
        migrations.AddField(
            model_name='historicaluser',
            name='personnal_restrictions',
            field=models.TextField(blank=True, null=True, verbose_name='Personnal restrictions'),
        ),
        migrations.AddField(
            model_name='user',
            name='city',
            field=models.CharField(blank=True, max_length=50, null=True, verbose_name='City'),
        ),
        migrations.AddField(
            model_name='user',
            name='personnal_restrictions',
            field=models.TextField(blank=True, null=True, verbose_name='Personnal restrictions'),
        ),
    ]
| 31.176471
| 97
| 0.603774
| 107
| 1,060
| 5.841122
| 0.401869
| 0.1152
| 0.1472
| 0.1728
| 0.7584
| 0.7584
| 0.6144
| 0.6144
| 0.6144
| 0.6144
| 0
| 0.030026
| 0.277358
| 1,060
| 33
| 98
| 32.121212
| 0.785901
| 0.042453
| 0
| 0.740741
| 1
| 0
| 0.168806
| 0.065153
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
583e9e2b1264d8360c952269eb38872eb5643d3d
| 7,501
|
py
|
Python
|
test/unit/agent/managers/nginx.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 308
|
2015-11-17T13:15:33.000Z
|
2022-03-24T12:03:40.000Z
|
test/unit/agent/managers/nginx.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 211
|
2015-11-16T15:27:41.000Z
|
2022-03-28T16:20:15.000Z
|
test/unit/agent/managers/nginx.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 80
|
2015-11-16T18:20:30.000Z
|
2022-03-02T12:47:56.000Z
|
# -*- coding: utf-8 -*-
import re
from time import sleep
from hamcrest import *
from amplify.agent.common.util import subp
from amplify.agent.common.context import context
from amplify.agent.managers.nginx import NginxManager
from test.base import RealNginxTestCase, container_test, RealNginxSupervisordTestCase
from test.helpers import DummyRootObject
__author__ = "Mike Belov"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "Mike Belov"
__email__ = "dedm@nginx.com"
class NginxManagerTestCase(RealNginxTestCase):
    """Integration tests for NginxManager discovery against a real nginx.

    These tests compare what NginxManager discovers with ground truth taken
    directly from `ps`, across restarts, reloads and multiple instances.
    They require a running nginx on the host (provided by RealNginxTestCase).
    """
    def get_master_workers(self):
        """Return (master_pid, [worker_pids]) by parsing `ps` output for nginx."""
        master, workers = None, []
        ps, _ = subp.call('ps -xa -o pid,ppid,command | egrep "PID|nginx" | grep -v egrep')
        for line in ps:
            # 21355 1 nginx: master process /usr/sbin/nginx
            gwe = re.match(r'\s*(?P<pid>\d+)\s+(?P<ppid>\d+)\s+(?P<cmd>.+)\s*', line)
            # if not parsed - switch to next line
            if not gwe or 'py.test' in line:
                continue
            pid = int(gwe.group('pid'))
            cmd = gwe.group('cmd')
            if 'nginx: master process' in cmd:
                master = pid
            else:
                workers.append(pid)
        return master, workers
    def test_find_all(self):
        """_find_all() should report exactly one nginx with the pids seen by ps."""
        manager = NginxManager()
        nginxes = manager._find_all()
        assert_that(nginxes, has_length(1))
        definition, data = nginxes.pop(0)
        assert_that(data, has_key('pid'))
        assert_that(data, has_key('workers'))
        # get ps info
        master, workers = self.get_master_workers()
        assert_that(master, equal_to(data['pid']))
        assert_that(workers, equal_to(data['workers']))
    def test_restart(self):
        """After a full restart both the master pid and the worker pids change."""
        old_master, old_workers = self.get_master_workers()
        manager = NginxManager()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.pid, equal_to(old_master))
        assert_that(obj.workers, equal_to(old_workers))
        self.restart_nginx()
        new_master, new_workers = self.get_master_workers()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.pid, not_(equal_to(old_master)))
        assert_that(obj.pid, equal_to(new_master))
        assert_that(obj.workers, not_(equal_to(old_workers)))
        assert_that(obj.workers, equal_to(new_workers))
    def test_reload(self):
        """After a reload the master pid survives but workers are replaced."""
        old_master, old_workers = self.get_master_workers()
        manager = NginxManager()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        # The following assertion is unreliable for some reason.
        assert_that(obj.pid, equal_to(old_master))
        assert_that(obj.workers, equal_to(old_workers))
        self.reload_nginx()
        sleep(1)  # nginx needs some time to reload
        new_master, new_workers = self.get_master_workers()
        assert_that(new_master, equal_to(old_master))
        manager._discover_objects()
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.pid, equal_to(old_master))
        assert_that(obj.workers, not_(equal_to(old_workers)))
        assert_that(obj.workers, equal_to(new_workers))
    def test_two_instances(self):
        """Starting a second nginx should yield two discovered objects, old one kept."""
        manager = NginxManager()
        manager._discover_objects()
        obj = manager.objects.find_all(types=manager.types)[0]
        self.start_second_nginx()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(2))
        local_ids = map(lambda x: x.local_id, manager.objects.find_all(types=manager.types))
        assert_that(local_ids, has_item(obj.local_id))
    def test_find_none(self):
        """With no nginx running, discovery finds nothing and records one event."""
        # Kill running NGINX so that it finds None
        subp.call('pgrep nginx |sudo xargs kill -9', check=False)
        self.running = False
        # Setup dummy object
        context.objects.register(DummyRootObject())
        manager = NginxManager()
        nginxes = manager._find_all()
        assert_that(nginxes, has_length(0))
        root_object = context.objects.root_object
        assert_that(root_object.eventd.current, has_length(1))
        # Reset objects...
        context.objects = None
        context._setup_object_tank()
@container_test
class DockerNginxManagerTestCase(NginxManagerTestCase):
    """Same discovery tests run inside a container.

    Overrides the restart/reload/two-instance tests to additionally check
    that discovered objects carry the 'container_nginx' type.
    """
    def test_restart(self):
        """Restart changes master and worker pids; object type stays container_nginx."""
        old_master, old_workers = self.get_master_workers()
        manager = NginxManager()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.pid, equal_to(old_master))
        assert_that(obj.workers, equal_to(old_workers))
        assert_that(obj.type, equal_to('container_nginx'))
        self.restart_nginx()
        new_master, new_workers = self.get_master_workers()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.pid, not_(equal_to(old_master)))
        assert_that(obj.pid, equal_to(new_master))
        assert_that(obj.workers, not_(equal_to(old_workers)))
        assert_that(obj.workers, equal_to(new_workers))
        assert_that(obj.type, equal_to('container_nginx'))
    def test_reload(self):
        """Reload keeps the master pid, replaces workers; type stays container_nginx."""
        old_master, old_workers = self.get_master_workers()
        manager = NginxManager()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        # The following assertion is unreliable for some reason.
        assert_that(obj.pid, equal_to(old_master))
        assert_that(obj.workers, equal_to(old_workers))
        assert_that(obj.type, equal_to('container_nginx'))
        self.reload_nginx()
        sleep(1)  # nginx needs some time to reload
        new_master, new_workers = self.get_master_workers()
        assert_that(new_master, equal_to(old_master))
        manager._discover_objects()
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.pid, equal_to(old_master))
        assert_that(obj.workers, not_(equal_to(old_workers)))
        assert_that(obj.workers, equal_to(new_workers))
        assert_that(obj.type, equal_to('container_nginx'))
    def test_two_instances(self):
        """A second nginx instance is discovered alongside the first container one."""
        manager = NginxManager()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(1))
        obj = manager.objects.find_all(types=manager.types)[0]
        assert_that(obj.type, equal_to('container_nginx'))
        self.start_second_nginx()
        manager._discover_objects()
        assert_that(manager.objects.find_all(types=manager.types), has_length(2))
        local_ids = map(lambda x: x.local_id, manager.objects.find_all(types=manager.types))
        assert_that(local_ids, has_item(obj.local_id))
class SupervisorNginxManagerTestCase(NginxManagerTestCase, RealNginxSupervisordTestCase):
    # Re-runs the shared NginxManagerTestCase suite against an nginx managed
    # by supervisord; the fixtures come from RealNginxSupervisordTestCase, so
    # no additional test logic is needed here.
    pass
| 36.950739
| 92
| 0.675377
| 970
| 7,501
| 4.941237
| 0.156701
| 0.09806
| 0.073232
| 0.092009
| 0.720843
| 0.712497
| 0.712497
| 0.704778
| 0.704778
| 0.699562
| 0
| 0.005604
| 0.214905
| 7,501
| 202
| 93
| 37.133663
| 0.808287
| 0.049327
| 0
| 0.662069
| 0
| 0.013793
| 0.049164
| 0.006743
| 0
| 0
| 0
| 0
| 0.324138
| 1
| 0.062069
| false
| 0.006897
| 0.055172
| 0
| 0.144828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
586e3af4d046eedb2e29d2c2113fddfb439933b0
| 157
|
py
|
Python
|
environments/__init__.py
|
seungjaeryanlee/implementations-dqn
|
134df06ba92fe3c35a74ee9e1a8be1e6460a345e
|
[
"MIT"
] | 3
|
2019-12-13T08:48:58.000Z
|
2020-07-27T03:52:36.000Z
|
environments/__init__.py
|
seungjaeryanlee/implementations-dqn
|
134df06ba92fe3c35a74ee9e1a8be1e6460a345e
|
[
"MIT"
] | 3
|
2019-07-21T14:16:34.000Z
|
2019-08-03T10:06:02.000Z
|
environments/__init__.py
|
seungjaeryanlee/implementations-dqn
|
134df06ba92fe3c35a74ee9e1a8be1e6460a345e
|
[
"MIT"
] | null | null | null |
# flake8: noqa
from .atari_preprocessing import AtariPreprocessing
from .atari_wrappers import make_atari, wrap_deepmind
from .frame_stack import FrameStack
| 31.4
| 53
| 0.859873
| 20
| 157
| 6.5
| 0.7
| 0.138462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007092
| 0.101911
| 157
| 4
| 54
| 39.25
| 0.914894
| 0.076433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
588e008d7ada558cf57a0d1982a83022c84c5038
| 515
|
py
|
Python
|
catkin_ws/src/00-infrastructure/what_the_duck/include/what_the_duck/checks/__init__.py
|
himarsmty/duckiebot
|
fe95943f29716908cef09a5054f4efcf5a1de33e
|
[
"CC-BY-2.0"
] | null | null | null |
catkin_ws/src/00-infrastructure/what_the_duck/include/what_the_duck/checks/__init__.py
|
himarsmty/duckiebot
|
fe95943f29716908cef09a5054f4efcf5a1de33e
|
[
"CC-BY-2.0"
] | null | null | null |
catkin_ws/src/00-infrastructure/what_the_duck/include/what_the_duck/checks/__init__.py
|
himarsmty/duckiebot
|
fe95943f29716908cef09a5054f4efcf5a1de33e
|
[
"CC-BY-2.0"
] | null | null | null |
# Aggregate all check modules into this package namespace.
# BUG FIX: `from .package_import import *` appeared twice; the FIRST
# occurrence is removed (not the second) so that the final name bindings
# under wildcard-import last-wins semantics are unchanged.
from .command_output import *
from .date import *
from .duckietown_repo import *
from .existence import *
from .groups import *
from .package_import import *
from .passwordless import *
from .permissions import *
from .file_contains import *
from .environment_variable import *
from .wifi import *
from .hostname import *
from .shells import *
from .github import *
from .internet_connected import *
from .scuderia_checks import *
from .machines_checks import *
from .packages import *
| 27.105263
| 35
| 0.780583
| 66
| 515
| 5.954545
| 0.393939
| 0.458015
| 0.086514
| 0.117048
| 0.167939
| 0.167939
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145631
| 515
| 19
| 36
| 27.105263
| 0.893182
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.052632
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
54bf005c173116102680e5fade2bed97ad3d61df
| 51
|
py
|
Python
|
src/wake.py
|
MakeNTU/2021_team00_WakeNTU
|
78a1a270d6638199b0279160594a87a902077e26
|
[
"Apache-2.0"
] | 1
|
2021-05-30T08:20:37.000Z
|
2021-05-30T08:20:37.000Z
|
src/wake.py
|
MakeNTU/2021_team00_WakeNTU
|
78a1a270d6638199b0279160594a87a902077e26
|
[
"Apache-2.0"
] | null | null | null |
src/wake.py
|
MakeNTU/2021_team00_WakeNTU
|
78a1a270d6638199b0279160594a87a902077e26
|
[
"Apache-2.0"
] | null | null | null |
# Print the event and project banner lines.
for banner_line in ("2021 MakeNTU Cabin Fever", "WakeNTU"):
    print(banner_line)
| 17
| 33
| 0.745098
| 7
| 51
| 5.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.098039
| 51
| 2
| 34
| 25.5
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0.607843
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
49e08bec9020ee52efb66505e7695c3c8dc7a57e
| 430
|
py
|
Python
|
g_selfatt/nn/__init__.py
|
dwromero/g_selfatt
|
14204b3eb2a9d70329ee8f33a04ac5965c11e8c6
|
[
"MIT"
] | 19
|
2021-03-11T08:58:32.000Z
|
2022-03-23T07:55:43.000Z
|
g_selfatt/nn/__init__.py
|
dwromero/g_selfatt
|
14204b3eb2a9d70329ee8f33a04ac5965c11e8c6
|
[
"MIT"
] | 4
|
2021-11-25T07:52:41.000Z
|
2022-02-16T16:41:37.000Z
|
g_selfatt/nn/__init__.py
|
dwromero/g_selfatt
|
14204b3eb2a9d70329ee8f33a04ac5965c11e8c6
|
[
"MIT"
] | 2
|
2021-10-29T21:07:04.000Z
|
2021-12-10T05:14:17.000Z
|
from . import activations
from .cropping import Crop
from .group_local_self_attention import GroupLocalSelfAttention
from .group_self_attention import GroupSelfAttention
from .layers import Conv2d1x1, Conv3d1x1, LayerNorm
from .lift_local_self_attention import LiftLocalSelfAttention
from .lift_self_attention import LiftSelfAttention
from .rd_self_attention import RdSelfAttention
from .transformer_block import TransformerBlock
| 43
| 63
| 0.886047
| 50
| 430
| 7.36
| 0.46
| 0.17663
| 0.258152
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015306
| 0.088372
| 430
| 9
| 64
| 47.777778
| 0.923469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
49e5882ac8056d23bab80938ba5c609c52d48274
| 728
|
py
|
Python
|
lib/portal/portal/PortalAuthenticatorMinimal.py
|
jumpscale7/jumpscale_portal
|
8c99265e48f85643f8a52bc40a23f5266fb09231
|
[
"Apache-2.0"
] | 2
|
2016-04-14T14:05:01.000Z
|
2016-04-21T07:20:36.000Z
|
lib/portal/portal/PortalAuthenticatorMinimal.py
|
jumpscale7/jumpscale_portal
|
8c99265e48f85643f8a52bc40a23f5266fb09231
|
[
"Apache-2.0"
] | 13
|
2016-03-07T12:07:15.000Z
|
2018-02-28T13:11:59.000Z
|
lib/portal/portal/PortalAuthenticatorMinimal.py
|
jumpscale7/jumpscale_portal
|
8c99265e48f85643f8a52bc40a23f5266fb09231
|
[
"Apache-2.0"
] | 5
|
2016-03-08T07:49:51.000Z
|
2018-10-19T13:57:04.000Z
|
from JumpScale import j
class PortalAuthenticatorMinimal(object):
    """Minimal portal authenticator.

    Provides authenticate() plus the other helper functions that get
    attached to the client.  Every caller is accepted and treated as the
    anonymous 'guest' user.
    """

    def __init__(self, instance='main'):
        # Stateless: the instance name is accepted for interface
        # compatibility only.
        pass

    def authenticate(self, login, password):
        # Any credentials are accepted.
        return True

    def getGroups(self, username):
        # Every user belongs to the guest group only.
        return ['guest']

    def getUserFromKey(self, key):
        return "guest"

    def getUserSpaces(self, username, **kwargs):
        # TODO
        return []

    def getUserSpacesObjects(self, username):
        return []

    def getUserSpaceRights(self, username, space, **kwargs):
        # Fixed guest role with the 'rwa' rights string on every space.
        return 'guest', 'rwa'
| 23.483871
| 87
| 0.614011
| 71
| 728
| 6.239437
| 0.619718
| 0.108352
| 0.081264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.29533
| 728
| 31
| 88
| 23.483871
| 0.863548
| 0.175824
| 0
| 0.125
| 0
| 0
| 0.037866
| 0
| 0
| 0
| 0
| 0.032258
| 0
| 1
| 0.4375
| false
| 0.125
| 0.0625
| 0.375
| 0.9375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
b72853b9cf7b21cd91955d815d15b8fde0626bc6
| 95
|
py
|
Python
|
ecommerce/shipping.py
|
MKen212/pymosh
|
6db31f181081baf3e4cc3d7228a3480b16bd3cda
|
[
"MIT"
] | null | null | null |
ecommerce/shipping.py
|
MKen212/pymosh
|
6db31f181081baf3e4cc3d7228a3480b16bd3cda
|
[
"MIT"
] | null | null | null |
ecommerce/shipping.py
|
MKen212/pymosh
|
6db31f181081baf3e4cc3d7228a3480b16bd3cda
|
[
"MIT"
] | null | null | null |
# Shipping Module in e-commerce package


def calc_shipping():
    """Announce the shipping-calculation step (demo placeholder)."""
    message = "Calculating Shipping"
    print(message)
| 23.75
| 39
| 0.757895
| 12
| 95
| 5.916667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147368
| 95
| 4
| 40
| 23.75
| 0.876543
| 0.389474
| 0
| 0
| 0
| 0
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
b72e4b337964a91c1274110a4c10af193fc86825
| 407,293
|
py
|
Python
|
ideas/padraig/generatedFromV2Schema/neuroml/v2/__init__.py
|
mattions/libNeuroML
|
c623292c7832c84421d55799efdbd7711cca54ae
|
[
"BSD-3-Clause"
] | 1
|
2016-05-01T13:07:41.000Z
|
2016-05-01T13:07:41.000Z
|
ideas/padraig/generatedFromV2Schema/neuroml/v2/__init__.py
|
mattions/libNeuroML
|
c623292c7832c84421d55799efdbd7711cca54ae
|
[
"BSD-3-Clause"
] | null | null | null |
ideas/padraig/generatedFromV2Schema/neuroml/v2/__init__.py
|
mattions/libNeuroML
|
c623292c7832c84421d55799efdbd7711cca54ae
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated by generateDS.py version 2.7b_pg.
#
import sys
import getopt
import re as re_

# Parser-backend selection: try progressively older ElementTree
# implementations, preferring lxml, and record which one was imported so
# that later code (e.g. raise_parse_error) can adapt to its feature set.
etree_ = None
Verbose_import_ = False
( XMLParser_import_none, XMLParser_import_lxml,
  XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
try:
    # lxml
    from lxml import etree as etree_
    XMLParser_import_library = XMLParser_import_lxml
    if Verbose_import_:
        print("running with lxml.etree")
except ImportError:
    try:
        # cElementTree from Python 2.5+
        import xml.etree.cElementTree as etree_
        XMLParser_import_library = XMLParser_import_elementtree
        if Verbose_import_:
            print("running with cElementTree on Python 2.5+")
    except ImportError:
        try:
            # ElementTree from Python 2.5+
            import xml.etree.ElementTree as etree_
            XMLParser_import_library = XMLParser_import_elementtree
            if Verbose_import_:
                print("running with ElementTree on Python 2.5+")
        except ImportError:
            try:
                # normal cElementTree install
                import cElementTree as etree_
                XMLParser_import_library = XMLParser_import_elementtree
                if Verbose_import_:
                    print("running with cElementTree")
            except ImportError:
                try:
                    # normal ElementTree install
                    import elementtree.ElementTree as etree_
                    XMLParser_import_library = XMLParser_import_elementtree
                    if Verbose_import_:
                        print("running with ElementTree")
                except ImportError:
                    raise ImportError("Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse an XML document with whichever ElementTree backend was imported.

    Under lxml, an ETCompatXMLParser is injected (unless the caller already
    supplied a parser) so that, e.g., comments are ignored, matching plain
    ElementTree behaviour.
    """
    using_lxml = XMLParser_import_library == XMLParser_import_lxml
    if using_lxml and 'parser' not in kwargs:
        kwargs['parser'] = etree_.ETCompatXMLParser()
    return etree_.parse(*args, **kwargs)
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError, exp:
    # Fallback: define the default superclass inline when the user has not
    # supplied a generatedssuper module of their own.  (Python 2 `except`
    # syntax -- this file is generated for Python 2.)
    class GeneratedsSuper(object):
        """Default format/validate hooks used by the generated classes."""
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_integer_list(self, input_data, node, input_name=''):
            # NOTE(review): vets tokens with float(), so '1.5' would pass the
            # "integer" check -- kept exactly as generated.
            values = input_data.split()
            for value in values:
                try:
                    fvalue = float(value)
                except (TypeError, ValueError), exp:
                    raise_parse_error(node, 'Requires sequence of integers')
            return input_data
        def gds_format_float(self, input_data, input_name=''):
            return '%f' % input_data
        def gds_validate_float(self, input_data, node, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_float_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    fvalue = float(value)
                except (TypeError, ValueError), exp:
                    raise_parse_error(node, 'Requires sequence of floats')
            return input_data
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_double_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    fvalue = float(value)
                except (TypeError, ValueError), exp:
                    raise_parse_error(node, 'Requires sequence of doubles')
            return input_data
        def gds_format_boolean(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_boolean(self, input_data, node, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % input_data
        def gds_validate_boolean_list(self, input_data, node, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(node, 'Requires sequence of booleans ("true", "1", "false", "0")')
            return input_data
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            # Build an XPath-like '/a/b/c' path from the element's ancestry.
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')  # strips a leading '{ns}' prefix
        def get_path_list_(self, node, path_list):
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            # NOTE(review): node.getparent() exists on lxml elements only --
            # presumably this helper is reached just with the lxml backend.
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            # Honor an xsi:type attribute by resolving the class via globals().
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'ascii'  # charset applied to attribute values before writing
Tag_pattern_ = re_.compile(r'({.*})?(.*)')  # optional '{ns}' prefix + local tag name
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")  # matches runs of whitespace
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')  # captures namespace URI and local name
#
# Support/utility functions.
#
def showIndent(outfile, level):
    """Write `level` levels of four-space indentation to *outfile*."""
    for _ in range(level):
        outfile.write('    ')
def quote_xml(inStr):
    """Escape '&', '<' and '>' so *inStr* is safe as XML text content.

    Falsy input yields ''.  Non-string input is first converted with '%s'
    formatting (Python 2 `basestring`, matching the rest of this file).

    BUG FIX: the entity replacement strings had been HTML-decoded by a
    text-extraction step (e.g. replace('&', '&') -- a no-op), destroying
    the escaping; restored to the generateDS originals.
    """
    if not inStr:
        return ''
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    # '&' must be escaped first so the '&' introduced by the other entity
    # replacements is not escaped again.
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape *inStr* and wrap it in quotes for use as an XML attribute value.

    Uses single quotes when the value contains double quotes (and no single
    quotes); otherwise double quotes, escaping embedded '"' as &quot;.

    BUG FIX: the entity replacement strings had been HTML-decoded by a
    text-extraction step (replace('&', '&') was a no-op and the &quot;
    literal had collapsed into unbalanced quotes); restored to the
    generateDS originals.
    """
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    # '&' must be escaped before the entities below introduce new '&'s.
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Return *inStr* quoted as a Python string literal.

    Prefers single quotes; falls back to double quotes when the text
    contains a single quote (escaping any embedded double quotes), and
    uses triple quotes for multi-line text.
    """
    text = inStr
    if "'" not in text:
        quote = "'''" if '\n' in text else "'"
        return quote + text + quote
    if '"' in text:
        text = text.replace('"', '\\"')
    quote = '"""' if '\n' in text else '"'
    return quote + text + quote
def get_all_text_(node):
    """Collect *node*'s own text plus the tail text of each direct child.

    Child element text is deliberately excluded -- only the text that
    belongs to *node* itself (leading text and inter-child tails) counts.
    """
    text = node.text if node.text is not None else ''
    for child in node:
        if child.tail is not None:
            text += child.tail
    return text
def find_attr_value_(attr_name, node):
    """Look up an attribute on *node*, resolving a 'prefix:name' qualifier.

    A plain name is looked up directly; a 'prefix:name' pair is resolved
    through node.nsmap (lxml) to a '{namespace}name' key.  Returns None
    when the attribute is absent or the prefix is unknown.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, name = parts
        namespace = node.nsmap.get(prefix)
        if namespace is not None:
            return attrs.get('{%s}%s' % (namespace, name, ))
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when parsing/validation fails."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for *node*, adding line info under the lxml backend."""
    if XMLParser_import_library == XMLParser_import_lxml:
        # lxml elements expose their source line number.
        detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    else:
        detail = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(detail)
class MixedContainer:
    """Wrapper for one piece of mixed content: raw text or a nested element."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is unused; kept as generated.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace):
        # Serialize this piece of mixed content as XML.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace,name)
    def exportSimple(self, outfile, level, name):
        # Emit <name>value</name>, formatting the value per content_type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (self.name, self.value, self.name))
    def exportLiteral(self, outfile, level, name):
        # Emit a Python-literal 'model_.MixedContainer(...)' representation.
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
                (self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
                (self.category, self.content_type, self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s",\n' % \
                (self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Description of one exported member: name, data type(s), container flag."""

    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        """Return the effective type: the last entry of a type chain, or the
        raw value; an empty chain falls back to 'xs:string'."""
        if not isinstance(self.data_type, list):
            return self.data_type
        if self.data_type:
            return self.data_type[-1]
        return 'xs:string'

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class Annotation(GeneratedsSuper):
    """Placeholder for MIRIAM related metadata, among others."""
    subclass = None
    superclass = None
    def __init__(self, anytypeobjs_=None):
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if Annotation.subclass:
            return Annotation.subclass(*args_, **kwargs_)
        else:
            return Annotation(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUG FIX: the generated code assigned to self._anytypeobjs_ (leading
    # underscore), an attribute that never exists, so every call raised
    # AttributeError.  The backing list is self.anytypeobjs_.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='', name_='Annotation', namespacedef_=''):
        # Serialize as XML; self-close when there is no child content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Annotation')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Annotation'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='Annotation', fromsubclass_=False):
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        if (
            self.anytypeobjs_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Annotation'):
        # Emit a Python-literal representation of this object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        obj_ = self.gds_build_any(child_, 'Annotation')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
# end class Annotation
class includeType(GeneratedsSuper):
    """Generated binding for includeType: an 'href' attribute plus mixed text
    content, collected into MixedContainer entries."""
    subclass = None
    superclass = None
    def __init__(self, href=None, valueOf_=None, mixedclass_=None, content_=None):
        self.href = _cast(None, href)
        self.valueOf_ = valueOf_
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if includeType.subclass:
            return includeType.subclass(*args_, **kwargs_)
        else:
            return includeType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_href(self): return self.href
    def set_href(self, href): self.href = href
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def export(self, outfile, level, namespace_='', name_='includeType', namespacedef_=''):
        # Serialize as XML; always writes an explicit closing tag because the
        # element may carry mixed text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='includeType')
        outfile.write('>')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='includeType'):
        if self.href is not None and 'href' not in already_processed:
            already_processed.append('href')
            outfile.write(' href=%s' % (self.gds_format_string(quote_attrib(self.href).encode(ExternalEncoding), input_name='href'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='includeType', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='includeType'):
        # Emit a Python-literal representation of this object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.href is not None and 'href' not in already_processed:
            already_processed.append('href')
            showIndent(outfile, level)
            outfile.write('href = "%s",\n' % (self.href,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node, capturing leading text and child
        # tails as MixedContainer text entries.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        if node.text is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', node.text)
            self.content_.append(obj_)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('href', node)
        if value is not None and 'href' not in already_processed:
            already_processed.append('href')
            self.href = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if not fromsubclass_ and child_.tail is not None:
            obj_ = self.mixedclass_(MixedContainer.CategoryText,
                MixedContainer.TypeNone, '', child_.tail)
            self.content_.append(obj_)
        pass
# end class includeType
class VoltageConcDepBlock(GeneratedsSuper):
    """Generated binding for the VoltageConcDepBlock element (attributes only:
    blockConcentration, scalingConc, type, species, scalingVolt)."""
    subclass = None
    superclass = None
    def __init__(self, blockConcentration=None, scalingConc=None, type_=None, species=None, scalingVolt=None):
        self.blockConcentration = _cast(None, blockConcentration)
        self.scalingConc = _cast(None, scalingConc)
        self.type_ = _cast(None, type_)
        self.species = _cast(None, species)
        self.scalingVolt = _cast(None, scalingVolt)
        pass
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if VoltageConcDepBlock.subclass:
            return VoltageConcDepBlock.subclass(*args_, **kwargs_)
        else:
            return VoltageConcDepBlock(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_blockConcentration(self): return self.blockConcentration
    def set_blockConcentration(self, blockConcentration): self.blockConcentration = blockConcentration
    def validate_Nml2Quantity_concentration(self, value):
        # Validate type Nml2Quantity_concentration, a restriction on xs:string.
        pass
    def get_scalingConc(self): return self.scalingConc
    def set_scalingConc(self, scalingConc): self.scalingConc = scalingConc
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def get_species(self): return self.species
    def set_species(self, species): self.species = species
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def get_scalingVolt(self): return self.scalingVolt
    def set_scalingVolt(self, scalingVolt): self.scalingVolt = scalingVolt
    def validate_Nml2Quantity_voltage(self, value):
        # Validate type Nml2Quantity_voltage, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='VoltageConcDepBlock', namespacedef_=''):
        # Serialize as XML; self-closes when hasContent_() is False (always,
        # see below).
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='VoltageConcDepBlock')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='VoltageConcDepBlock'):
        if self.blockConcentration is not None and 'blockConcentration' not in already_processed:
            already_processed.append('blockConcentration')
            outfile.write(' blockConcentration=%s' % (quote_attrib(self.blockConcentration), ))
        if self.scalingConc is not None and 'scalingConc' not in already_processed:
            already_processed.append('scalingConc')
            outfile.write(' scalingConc=%s' % (quote_attrib(self.scalingConc), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
        if self.species is not None and 'species' not in already_processed:
            already_processed.append('species')
            outfile.write(' species=%s' % (quote_attrib(self.species), ))
        if self.scalingVolt is not None and 'scalingVolt' not in already_processed:
            already_processed.append('scalingVolt')
            outfile.write(' scalingVolt=%s' % (quote_attrib(self.scalingVolt), ))
    def exportChildren(self, outfile, level, namespace_='', name_='VoltageConcDepBlock', fromsubclass_=False):
        pass
    def hasContent_(self):
        # Generated empty condition: this element defines no child content,
        # so `if ():` tests an empty tuple and is always False -- the
        # element always self-closes.
        if (
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='VoltageConcDepBlock'):
        # Emit a Python-literal representation of this object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.blockConcentration is not None and 'blockConcentration' not in already_processed:
            already_processed.append('blockConcentration')
            showIndent(outfile, level)
            outfile.write('blockConcentration = "%s",\n' % (self.blockConcentration,))
        if self.scalingConc is not None and 'scalingConc' not in already_processed:
            already_processed.append('scalingConc')
            showIndent(outfile, level)
            outfile.write('scalingConc = "%s",\n' % (self.scalingConc,))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            showIndent(outfile, level)
            outfile.write('type_ = "%s",\n' % (self.type_,))
        if self.species is not None and 'species' not in already_processed:
            already_processed.append('species')
            showIndent(outfile, level)
            outfile.write('species = "%s",\n' % (self.species,))
        if self.scalingVolt is not None and 'scalingVolt' not in already_processed:
            already_processed.append('scalingVolt')
            showIndent(outfile, level)
            outfile.write('scalingVolt = "%s",\n' % (self.scalingVolt,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('blockConcentration', node)
        if value is not None and 'blockConcentration' not in already_processed:
            already_processed.append('blockConcentration')
            self.blockConcentration = value
            self.validate_Nml2Quantity_concentration(self.blockConcentration)    # validate type Nml2Quantity_concentration
        value = find_attr_value_('scalingConc', node)
        if value is not None and 'scalingConc' not in already_processed:
            already_processed.append('scalingConc')
            self.scalingConc = value
            self.validate_Nml2Quantity_concentration(self.scalingConc)    # validate type Nml2Quantity_concentration
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.append('type')
            self.type_ = value
        value = find_attr_value_('species', node)
        if value is not None and 'species' not in already_processed:
            already_processed.append('species')
            self.species = value
            self.validate_NmlId(self.species)    # validate type NmlId
        value = find_attr_value_('scalingVolt', node)
        if value is not None and 'scalingVolt' not in already_processed:
            already_processed.append('scalingVolt')
            self.scalingVolt = value
            self.validate_Nml2Quantity_voltage(self.scalingVolt)    # validate type Nml2Quantity_voltage
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class VoltageConcDepBlock
class StpMechanism(GeneratedsSuper):
    """Generated binding for the stpMechanism element.

    Attributes: tauRec and tauFac (validated as Nml2Quantity_time
    strings) and initReleaseProb (a double validated as ZeroToOne).
    """
    subclass = None
    superclass = None
    def __init__(self, tauRec=None, tauFac=None, initReleaseProb=None):
        # _cast(None, x) stores the raw attribute value unconverted.
        self.tauRec = _cast(None, tauRec)
        self.tauFac = _cast(None, tauFac)
        self.initReleaseProb = _cast(None, initReleaseProb)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if StpMechanism.subclass:
            return StpMechanism.subclass(*args_, **kwargs_)
        else:
            return StpMechanism(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tauRec(self): return self.tauRec
    def set_tauRec(self, tauRec): self.tauRec = tauRec
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_tauFac(self): return self.tauFac
    def set_tauFac(self, tauFac): self.tauFac = tauFac
    def get_initReleaseProb(self): return self.initReleaseProb
    def set_initReleaseProb(self, initReleaseProb): self.initReleaseProb = initReleaseProb
    def validate_ZeroToOne(self, value):
        # Validate type ZeroToOne, a restriction on xs:double.
        pass
    def export(self, outfile, level, namespace_='', name_='StpMechanism', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='StpMechanism')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='StpMechanism'):
        # Emit each attribute at most once (already_processed guards dupes).
        if self.tauRec is not None and 'tauRec' not in already_processed:
            already_processed.append('tauRec')
            outfile.write(' tauRec=%s' % (quote_attrib(self.tauRec), ))
        if self.tauFac is not None and 'tauFac' not in already_processed:
            already_processed.append('tauFac')
            outfile.write(' tauFac=%s' % (quote_attrib(self.tauFac), ))
        if self.initReleaseProb is not None and 'initReleaseProb' not in already_processed:
            already_processed.append('initReleaseProb')
            outfile.write(' initReleaseProb=%s' % (quote_attrib(self.initReleaseProb), ))
    def exportChildren(self, outfile, level, namespace_='', name_='StpMechanism', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='StpMechanism'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.tauRec is not None and 'tauRec' not in already_processed:
            already_processed.append('tauRec')
            showIndent(outfile, level)
            outfile.write('tauRec = "%s",\n' % (self.tauRec,))
        if self.tauFac is not None and 'tauFac' not in already_processed:
            already_processed.append('tauFac')
            showIndent(outfile, level)
            outfile.write('tauFac = "%s",\n' % (self.tauFac,))
        if self.initReleaseProb is not None and 'initReleaseProb' not in already_processed:
            already_processed.append('initReleaseProb')
            showIndent(outfile, level)
            outfile.write('initReleaseProb = %e,\n' % (self.initReleaseProb,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('tauRec', node)
        if value is not None and 'tauRec' not in already_processed:
            already_processed.append('tauRec')
            self.tauRec = value
            self.validate_Nml2Quantity_time(self.tauRec) # validate type Nml2Quantity_time
        value = find_attr_value_('tauFac', node)
        if value is not None and 'tauFac' not in already_processed:
            already_processed.append('tauFac')
            self.tauFac = value
            self.validate_Nml2Quantity_time(self.tauFac) # validate type Nml2Quantity_time
        value = find_attr_value_('initReleaseProb', node)
        if value is not None and 'initReleaseProb' not in already_processed:
            already_processed.append('initReleaseProb')
            try:
                self.initReleaseProb = float(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise ValueError('Bad float/double attribute (initReleaseProb): %s' % exp)
            self.validate_ZeroToOne(self.initReleaseProb) # validate type ZeroToOne
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class StpMechanism
class SegmentParent(GeneratedsSuper):
    """Generated binding for a segment's parent reference.

    Attributes: segment (non-negative integer, validated as SegmentId)
    and fractionAlong (double in [0,1], validated as ZeroToOne;
    defaults to '1').
    """
    subclass = None
    superclass = None
    def __init__(self, fractionAlong='1', segment=None):
        self.fractionAlong = _cast(None, fractionAlong)
        self.segment = _cast(None, segment)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if SegmentParent.subclass:
            return SegmentParent.subclass(*args_, **kwargs_)
        else:
            return SegmentParent(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_fractionAlong(self): return self.fractionAlong
    def set_fractionAlong(self, fractionAlong): self.fractionAlong = fractionAlong
    def validate_ZeroToOne(self, value):
        # Validate type ZeroToOne, a restriction on xs:double.
        pass
    def get_segment(self): return self.segment
    def set_segment(self, segment): self.segment = segment
    def validate_SegmentId(self, value):
        # Validate type SegmentId, a restriction on xs:nonNegativeInteger.
        pass
    def export(self, outfile, level, namespace_='', name_='SegmentParent', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SegmentParent')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SegmentParent'):
        if self.fractionAlong is not None and 'fractionAlong' not in already_processed:
            already_processed.append('fractionAlong')
            outfile.write(' fractionAlong=%s' % (quote_attrib(self.fractionAlong), ))
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            outfile.write(' segment=%s' % (quote_attrib(self.segment), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SegmentParent', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='SegmentParent'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.fractionAlong is not None and 'fractionAlong' not in already_processed:
            already_processed.append('fractionAlong')
            showIndent(outfile, level)
            outfile.write('fractionAlong = %e,\n' % (self.fractionAlong,))
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            showIndent(outfile, level)
            outfile.write('segment = %d,\n' % (self.segment,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('fractionAlong', node)
        if value is not None and 'fractionAlong' not in already_processed:
            already_processed.append('fractionAlong')
            try:
                self.fractionAlong = float(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise ValueError('Bad float/double attribute (fractionAlong): %s' % exp)
            self.validate_ZeroToOne(self.fractionAlong) # validate type ZeroToOne
        value = find_attr_value_('segment', node)
        if value is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            try:
                self.segment = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.segment < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
            self.validate_SegmentId(self.segment) # validate type SegmentId
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class SegmentParent
class Point3DWithDiam(GeneratedsSuper):
    """A 3D point with diameter.

    Attributes x, y, z and diameter are all doubles (cast via
    _cast(float, ...) on construction, parsed with float() from XML).
    """
    subclass = None
    superclass = None
    def __init__(self, y=None, x=None, z=None, diameter=None):
        self.y = _cast(float, y)
        self.x = _cast(float, x)
        self.z = _cast(float, z)
        self.diameter = _cast(float, diameter)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if Point3DWithDiam.subclass:
            return Point3DWithDiam.subclass(*args_, **kwargs_)
        else:
            return Point3DWithDiam(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_y(self): return self.y
    def set_y(self, y): self.y = y
    def get_x(self): return self.x
    def set_x(self, x): self.x = x
    def get_z(self): return self.z
    def set_z(self, z): self.z = z
    def get_diameter(self): return self.diameter
    def set_diameter(self, diameter): self.diameter = diameter
    def export(self, outfile, level, namespace_='', name_='Point3DWithDiam', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Point3DWithDiam')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Point3DWithDiam'):
        if self.y is not None and 'y' not in already_processed:
            already_processed.append('y')
            outfile.write(' y="%s"' % self.gds_format_double(self.y, input_name='y'))
        if self.x is not None and 'x' not in already_processed:
            already_processed.append('x')
            outfile.write(' x="%s"' % self.gds_format_double(self.x, input_name='x'))
        if self.z is not None and 'z' not in already_processed:
            already_processed.append('z')
            outfile.write(' z="%s"' % self.gds_format_double(self.z, input_name='z'))
        if self.diameter is not None and 'diameter' not in already_processed:
            already_processed.append('diameter')
            outfile.write(' diameter="%s"' % self.gds_format_double(self.diameter, input_name='diameter'))
    def exportChildren(self, outfile, level, namespace_='', name_='Point3DWithDiam', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='Point3DWithDiam'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.y is not None and 'y' not in already_processed:
            already_processed.append('y')
            showIndent(outfile, level)
            outfile.write('y = %e,\n' % (self.y,))
        if self.x is not None and 'x' not in already_processed:
            already_processed.append('x')
            showIndent(outfile, level)
            outfile.write('x = %e,\n' % (self.x,))
        if self.z is not None and 'z' not in already_processed:
            already_processed.append('z')
            showIndent(outfile, level)
            outfile.write('z = %e,\n' % (self.z,))
        if self.diameter is not None and 'diameter' not in already_processed:
            already_processed.append('diameter')
            showIndent(outfile, level)
            outfile.write('diameter = %e,\n' % (self.diameter,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('y', node)
        if value is not None and 'y' not in already_processed:
            already_processed.append('y')
            try:
                self.y = float(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise ValueError('Bad float/double attribute (y): %s' % exp)
        value = find_attr_value_('x', node)
        if value is not None and 'x' not in already_processed:
            already_processed.append('x')
            try:
                self.x = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (x): %s' % exp)
        value = find_attr_value_('z', node)
        if value is not None and 'z' not in already_processed:
            already_processed.append('z')
            try:
                self.z = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (z): %s' % exp)
        value = find_attr_value_('diameter', node)
        if value is not None and 'diameter' not in already_processed:
            already_processed.append('diameter')
            try:
                self.diameter = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (diameter): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Point3DWithDiam
class ProximalDetails(GeneratedsSuper):
    """Generated binding with a single double attribute, translationStart."""
    subclass = None
    superclass = None
    def __init__(self, translationStart=None):
        self.translationStart = _cast(float, translationStart)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if ProximalDetails.subclass:
            return ProximalDetails.subclass(*args_, **kwargs_)
        else:
            return ProximalDetails(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_translationStart(self): return self.translationStart
    def set_translationStart(self, translationStart): self.translationStart = translationStart
    def export(self, outfile, level, namespace_='', name_='ProximalDetails', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ProximalDetails')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ProximalDetails'):
        if self.translationStart is not None and 'translationStart' not in already_processed:
            already_processed.append('translationStart')
            outfile.write(' translationStart="%s"' % self.gds_format_double(self.translationStart, input_name='translationStart'))
    def exportChildren(self, outfile, level, namespace_='', name_='ProximalDetails', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='ProximalDetails'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.translationStart is not None and 'translationStart' not in already_processed:
            already_processed.append('translationStart')
            showIndent(outfile, level)
            outfile.write('translationStart = %e,\n' % (self.translationStart,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('translationStart', node)
        if value is not None and 'translationStart' not in already_processed:
            already_processed.append('translationStart')
            try:
                self.translationStart = float(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise ValueError('Bad float/double attribute (translationStart): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class ProximalDetails
class DistalDetails(GeneratedsSuper):
    """Generated binding with a single double attribute, normalizationEnd."""
    subclass = None
    superclass = None
    def __init__(self, normalizationEnd=None):
        self.normalizationEnd = _cast(float, normalizationEnd)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if DistalDetails.subclass:
            return DistalDetails.subclass(*args_, **kwargs_)
        else:
            return DistalDetails(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_normalizationEnd(self): return self.normalizationEnd
    def set_normalizationEnd(self, normalizationEnd): self.normalizationEnd = normalizationEnd
    def export(self, outfile, level, namespace_='', name_='DistalDetails', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DistalDetails')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DistalDetails'):
        if self.normalizationEnd is not None and 'normalizationEnd' not in already_processed:
            already_processed.append('normalizationEnd')
            outfile.write(' normalizationEnd="%s"' % self.gds_format_double(self.normalizationEnd, input_name='normalizationEnd'))
    def exportChildren(self, outfile, level, namespace_='', name_='DistalDetails', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='DistalDetails'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.normalizationEnd is not None and 'normalizationEnd' not in already_processed:
            already_processed.append('normalizationEnd')
            showIndent(outfile, level)
            outfile.write('normalizationEnd = %e,\n' % (self.normalizationEnd,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('normalizationEnd', node)
        if value is not None and 'normalizationEnd' not in already_processed:
            already_processed.append('normalizationEnd')
            try:
                self.normalizationEnd = float(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise ValueError('Bad float/double attribute (normalizationEnd): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class DistalDetails
class Member(GeneratedsSuper):
    """Generated binding with a single non-negative integer attribute,
    segment (validated as SegmentId)."""
    subclass = None
    superclass = None
    def __init__(self, segment=None):
        self.segment = _cast(None, segment)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if Member.subclass:
            return Member.subclass(*args_, **kwargs_)
        else:
            return Member(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_segment(self): return self.segment
    def set_segment(self, segment): self.segment = segment
    def validate_SegmentId(self, value):
        # Validate type SegmentId, a restriction on xs:nonNegativeInteger.
        pass
    def export(self, outfile, level, namespace_='', name_='Member', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Member')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Member'):
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            outfile.write(' segment=%s' % (quote_attrib(self.segment), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Member', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='Member'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            showIndent(outfile, level)
            outfile.write('segment = %d,\n' % (self.segment,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('segment', node)
        if value is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            try:
                self.segment = int(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.segment < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
            self.validate_SegmentId(self.segment) # validate type SegmentId
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Member
class Include(GeneratedsSuper):
    """Generated binding with a single string attribute, segmentGroup
    (validated as NmlId)."""
    subclass = None
    superclass = None
    def __init__(self, segmentGroup=None):
        self.segmentGroup = _cast(None, segmentGroup)
    def factory(*args_, **kwargs_):
        # Use a registered subclass when present, otherwise this class.
        target = Include.subclass or Include
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_segmentGroup(self): return self.segmentGroup
    def set_segmentGroup(self, segmentGroup): self.segmentGroup = segmentGroup
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='Include', namespacedef_=''):
        """Serialize this element as XML onto outfile."""
        showIndent(outfile, level)
        ns_decl = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_decl))
        seen = []
        self.exportAttributes(outfile, level, seen, namespace_, name_='Include')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Include'):
        if self.segmentGroup is None or 'segmentGroup' in already_processed:
            return
        already_processed.append('segmentGroup')
        outfile.write(' segmentGroup=%s' % (quote_attrib(self.segmentGroup), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Include', fromsubclass_=False):
        pass
    def hasContent_(self):
        # This type defines no child content.
        return False
    def exportLiteral(self, outfile, level, name_='Include'):
        level = level + 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.segmentGroup is None or 'segmentGroup' in already_processed:
            return
        already_processed.append('segmentGroup')
        showIndent(outfile, level)
        outfile.write('segmentGroup = "%s",\n' % (self.segmentGroup,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Fill this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            child_name = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, child_name)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('segmentGroup', node)
        if value is None or 'segmentGroup' in already_processed:
            return
        already_processed.append('segmentGroup')
        self.segmentGroup = value
        self.validate_NmlId(self.segmentGroup) # validate type NmlId
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Include
class Path(GeneratedsSuper):
    """Generated binding holding two SegmentEndPoint children, serialized
    as the 'from' and 'to' elements."""
    subclass = None
    superclass = None
    def __init__(self, fromxx=None, to=None):
        # 'from' is a Python keyword, hence the mangled field name fromxx.
        self.fromxx = fromxx
        self.to = to
    def factory(*args_, **kwargs_):
        # Use a registered subclass when present, otherwise this class.
        target = Path.subclass or Path
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_from(self): return self.fromxx
    def set_from(self, fromxx): self.fromxx = fromxx
    def get_to(self): return self.to
    def set_to(self, to): self.to = to
    def export(self, outfile, level, namespace_='', name_='Path', namespacedef_=''):
        """Serialize this element as XML onto outfile."""
        showIndent(outfile, level)
        ns_decl = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_decl))
        self.exportAttributes(outfile, level, [], namespace_, name_='Path')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Path'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='Path', fromsubclass_=False):
        for child, tag in ((self.fromxx, 'from'), (self.to, 'to')):
            if child is not None:
                child.export(outfile, level, namespace_, name_=tag)
    def hasContent_(self):
        return self.fromxx is not None or self.to is not None
    def exportLiteral(self, outfile, level, name_='Path'):
        level = level + 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        literal_specs = (
            (self.fromxx, 'fromxx=model_.SegmentEndPoint(\n', 'from'),
            (self.to, 'to=model_.SegmentEndPoint(\n', 'to'),
        )
        for child, header, tag in literal_specs:
            if child is None:
                continue
            showIndent(outfile, level)
            outfile.write(header)
            child.exportLiteral(outfile, level, name_=tag)
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Fill this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            child_name = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, child_name)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ not in ('from', 'to'):
            return
        endpoint = SegmentEndPoint.factory()
        endpoint.build(child_)
        if nodeName_ == 'from':
            self.set_from(endpoint)
        else:
            self.set_to(endpoint)
# end class Path
class SubTree(GeneratedsSuper):
    """Generated binding holding two SegmentEndPoint children, serialized
    as the 'from' and 'to' elements."""
    subclass = None
    superclass = None
    def __init__(self, fromxx=None, to=None):
        # 'from' is a Python keyword, hence the mangled field name fromxx.
        self.fromxx = fromxx
        self.to = to
    def factory(*args_, **kwargs_):
        # Use a registered subclass when present, otherwise this class.
        target = SubTree.subclass or SubTree
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_from(self): return self.fromxx
    def set_from(self, fromxx): self.fromxx = fromxx
    def get_to(self): return self.to
    def set_to(self, to): self.to = to
    def export(self, outfile, level, namespace_='', name_='SubTree', namespacedef_=''):
        """Serialize this element as XML onto outfile."""
        showIndent(outfile, level)
        ns_decl = (' ' + namespacedef_) if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, ns_decl))
        self.exportAttributes(outfile, level, [], namespace_, name_='SubTree')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SubTree'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='SubTree', fromsubclass_=False):
        for child, tag in ((self.fromxx, 'from'), (self.to, 'to')):
            if child is not None:
                child.export(outfile, level, namespace_, name_=tag)
    def hasContent_(self):
        return self.fromxx is not None or self.to is not None
    def exportLiteral(self, outfile, level, name_='SubTree'):
        level = level + 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        literal_specs = (
            (self.fromxx, 'fromxx=model_.SegmentEndPoint(\n', 'from'),
            (self.to, 'to=model_.SegmentEndPoint(\n', 'to'),
        )
        for child, header, tag in literal_specs:
            if child is None:
                continue
            showIndent(outfile, level)
            outfile.write(header)
            child.exportLiteral(outfile, level, name_=tag)
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Fill this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            child_name = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, child_name)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ not in ('from', 'to'):
            return
        endpoint = SegmentEndPoint.factory()
        endpoint.build(child_)
        if nodeName_ == 'from':
            self.set_from(endpoint)
        else:
            self.set_to(endpoint)
# end class SubTree
class SegmentEndPoint(GeneratedsSuper):
    """Generated binding with a single non-negative integer attribute,
    segment (validated as SegmentId); used as the 'from'/'to' child of
    Path and SubTree."""
    subclass = None
    superclass = None
    def __init__(self, segment=None):
        self.segment = _cast(None, segment)
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if SegmentEndPoint.subclass:
            return SegmentEndPoint.subclass(*args_, **kwargs_)
        else:
            return SegmentEndPoint(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_segment(self): return self.segment
    def set_segment(self, segment): self.segment = segment
    def validate_SegmentId(self, value):
        # Validate type SegmentId, a restriction on xs:nonNegativeInteger.
        pass
    def export(self, outfile, level, namespace_='', name_='SegmentEndPoint', namespacedef_=''):
        """Write this element as XML to outfile at the given indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SegmentEndPoint')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SegmentEndPoint'):
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            outfile.write(' segment=%s' % (quote_attrib(self.segment), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SegmentEndPoint', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements are defined for this type (was 'if ():',
        # an always-false empty-tuple test).
        return False
    def exportLiteral(self, outfile, level, name_='SegmentEndPoint'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            showIndent(outfile, level)
            outfile.write('segment = %d,\n' % (self.segment,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate fields from an ElementTree node (attributes, then children)."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('segment', node)
        if value is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            try:
                self.segment = int(value)
            except ValueError as exp:  # 'as' form: valid Py2.6+ and Py3
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.segment < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
            self.validate_SegmentId(self.segment) # validate type SegmentId
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class SegmentEndPoint
class MembraneProperties(GeneratedsSuper):
    """Membrane properties of a cell.

    Aggregates six lists of child elements: channelPopulation,
    channelDensity, spikeThresh, specificCapacitance, initMembPotential
    and reversalPotential.  The export/build machinery handles the lists
    uniformly in the schema order given by ``_child_specs`` — the
    original generated code repeated the same export-literal section six
    times; that duplication is factored into _exportLiteralList below.
    """
    subclass = None
    superclass = None
    # (field name, class name used in exportLiteral output), in schema order.
    _child_specs = (
        ('channelPopulation', 'ChannelPopulation'),
        ('channelDensity', 'ChannelDensity'),
        ('spikeThresh', 'ValueAcrossSegOrSegGroup'),
        ('specificCapacitance', 'ValueAcrossSegOrSegGroup'),
        ('initMembPotential', 'ValueAcrossSegOrSegGroup'),
        ('reversalPotential', 'ReversalPotential'),
    )
    def __init__(self, channelPopulation=None, channelDensity=None, spikeThresh=None, specificCapacitance=None, initMembPotential=None, reversalPotential=None):
        # Each child list defaults to a fresh [] so instances never share
        # a mutable default.
        self.channelPopulation = [] if channelPopulation is None else channelPopulation
        self.channelDensity = [] if channelDensity is None else channelDensity
        self.spikeThresh = [] if spikeThresh is None else spikeThresh
        self.specificCapacitance = [] if specificCapacitance is None else specificCapacitance
        self.initMembPotential = [] if initMembPotential is None else initMembPotential
        self.reversalPotential = [] if reversalPotential is None else reversalPotential
    def factory(*args_, **kwargs_):
        # Create the registered subclass when one is installed, else this class.
        if MembraneProperties.subclass:
            return MembraneProperties.subclass(*args_, **kwargs_)
        else:
            return MembraneProperties(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_channelPopulation(self): return self.channelPopulation
    def set_channelPopulation(self, channelPopulation): self.channelPopulation = channelPopulation
    def add_channelPopulation(self, value): self.channelPopulation.append(value)
    def insert_channelPopulation(self, index, value): self.channelPopulation[index] = value
    def get_channelDensity(self): return self.channelDensity
    def set_channelDensity(self, channelDensity): self.channelDensity = channelDensity
    def add_channelDensity(self, value): self.channelDensity.append(value)
    def insert_channelDensity(self, index, value): self.channelDensity[index] = value
    def get_spikeThresh(self): return self.spikeThresh
    def set_spikeThresh(self, spikeThresh): self.spikeThresh = spikeThresh
    def add_spikeThresh(self, value): self.spikeThresh.append(value)
    def insert_spikeThresh(self, index, value): self.spikeThresh[index] = value
    def get_specificCapacitance(self): return self.specificCapacitance
    def set_specificCapacitance(self, specificCapacitance): self.specificCapacitance = specificCapacitance
    def add_specificCapacitance(self, value): self.specificCapacitance.append(value)
    def insert_specificCapacitance(self, index, value): self.specificCapacitance[index] = value
    def get_initMembPotential(self): return self.initMembPotential
    def set_initMembPotential(self, initMembPotential): self.initMembPotential = initMembPotential
    def add_initMembPotential(self, value): self.initMembPotential.append(value)
    def insert_initMembPotential(self, index, value): self.initMembPotential[index] = value
    def get_reversalPotential(self): return self.reversalPotential
    def set_reversalPotential(self, reversalPotential): self.reversalPotential = reversalPotential
    def add_reversalPotential(self, value): self.reversalPotential.append(value)
    def insert_reversalPotential(self, index, value): self.reversalPotential[index] = value
    def export(self, outfile, level, namespace_='', name_='MembraneProperties', namespacedef_=''):
        """Serialize this element (and all child lists) as XML to outfile."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='MembraneProperties')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='MembraneProperties'):
        # MembraneProperties has no XML attributes of its own.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='MembraneProperties', fromsubclass_=False):
        """Export every element of every child list, in schema order."""
        for field_name, _unused in self._child_specs:
            for child_obj in getattr(self, field_name):
                child_obj.export(outfile, level, namespace_, name_=field_name)
    def hasContent_(self):
        """Return True when any of the six child lists is non-empty."""
        return any(getattr(self, field_name) for field_name, _unused in self._child_specs)
    def exportLiteral(self, outfile, level, name_='MembraneProperties'):
        """Write this element as Python constructor-literal text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # No attributes to emit.
        pass
    def _exportLiteralList(self, outfile, level, items, field_name, class_name):
        # Emit one "field=[ model_.Class( ... ), ... ]," section.  Factored
        # out of exportLiteralChildren, which previously held six
        # near-identical copies of this text.
        showIndent(outfile, level)
        outfile.write('%s=[\n' % field_name)
        level += 1
        for item_ in items:
            showIndent(outfile, level)
            outfile.write('model_.%s(\n' % class_name)
            item_.exportLiteral(outfile, level, name_=class_name)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def exportLiteralChildren(self, outfile, level, name_):
        """Emit the literal form of every child list, in schema order."""
        for field_name, class_name in self._child_specs:
            self._exportLiteralList(outfile, level, getattr(self, field_name), field_name, class_name)
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # No attributes to read.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element and append it to the matching list."""
        if nodeName_ == 'channelPopulation':
            obj_ = ChannelPopulation.factory()
            obj_.build(child_)
            self.channelPopulation.append(obj_)
        elif nodeName_ == 'channelDensity':
            obj_ = ChannelDensity.factory()
            obj_.build(child_)
            self.channelDensity.append(obj_)
        elif nodeName_ in ('spikeThresh', 'specificCapacitance', 'initMembPotential'):
            # These three share the ValueAcrossSegOrSegGroup type;
            # get_class_obj_ presumably resolves any xsi:type override on
            # the element — TODO confirm against GeneratedsSuper.
            class_obj_ = self.get_class_obj_(child_, ValueAcrossSegOrSegGroup)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            getattr(self, nodeName_).append(obj_)
        elif nodeName_ == 'reversalPotential':
            obj_ = ReversalPotential.factory()
            obj_.build(child_)
            self.reversalPotential.append(obj_)
# end class MembraneProperties
class ValueAcrossSegOrSegGroup(GeneratedsSuper):
    """A value applied across a single segment or a whole segment group.

    Three XML attributes: ``segment``, ``segmentGroup`` (defaults to
    'all') and ``value``; ``extensiontype_`` carries an explicit
    xsi:type used by subclasses (e.g. ReversalPotential, Species).
    """
    subclass = None
    superclass = None
    def __init__(self, segment=None, segmentGroup='all', value=None, extensiontype_=None):
        self.segment = _cast(None, segment)
        self.segmentGroup = _cast(None, segmentGroup)
        self.value = _cast(None, value)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Create the registered subclass when one is installed, else this class.
        if ValueAcrossSegOrSegGroup.subclass:
            return ValueAcrossSegOrSegGroup.subclass(*args_, **kwargs_)
        else:
            return ValueAcrossSegOrSegGroup(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_segment(self): return self.segment
    def set_segment(self, segment): self.segment = segment
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def get_segmentGroup(self): return self.segmentGroup
    def set_segmentGroup(self, segmentGroup): self.segmentGroup = segmentGroup
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def validate_Nml2Quantity(self, value):
        # Validate type Nml2Quantity, a restriction on xs:string.
        pass
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='ValueAcrossSegOrSegGroup', namespacedef_=''):
        # Serialize as XML; self-closing when hasContent_() is False.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ValueAcrossSegOrSegGroup')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ValueAcrossSegOrSegGroup'):
        # Each attribute is written at most once; already_processed lets
        # subclasses chain this method without duplicating attributes.
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            outfile.write(' segment=%s' % (quote_attrib(self.segment), ))
        if self.segmentGroup is not None and 'segmentGroup' not in already_processed:
            already_processed.append('segmentGroup')
            outfile.write(' segmentGroup=%s' % (quote_attrib(self.segmentGroup), ))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            outfile.write(' value=%s' % (quote_attrib(self.value), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='ValueAcrossSegOrSegGroup', fromsubclass_=False):
        # No child elements in the base type.
        pass
    def hasContent_(self):
        # Generated empty condition (an empty tuple, always falsy):
        # the base type never has child content.
        if (
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ValueAcrossSegOrSegGroup'):
        # Write this element as Python constructor-literal text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.segment is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            showIndent(outfile, level)
            outfile.write('segment = "%s",\n' % (self.segment,))
        if self.segmentGroup is not None and 'segmentGroup' not in already_processed:
            already_processed.append('segmentGroup')
            showIndent(outfile, level)
            outfile.write('segmentGroup = "%s",\n' % (self.segmentGroup,))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            showIndent(outfile, level)
            outfile.write('value = "%s",\n' % (self.value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate attributes (and, in subclasses, children) from an
        # ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('segment', node)
        if value is not None and 'segment' not in already_processed:
            already_processed.append('segment')
            self.segment = value
            self.validate_NmlId(self.segment) # validate type NmlId
        value = find_attr_value_('segmentGroup', node)
        if value is not None and 'segmentGroup' not in already_processed:
            already_processed.append('segmentGroup')
            self.segmentGroup = value
            self.validate_NmlId(self.segmentGroup) # validate type NmlId
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.append('value')
            self.value = value
            self.validate_Nml2Quantity(self.value) # validate type Nml2Quantity
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class ValueAcrossSegOrSegGroup
class VariableParameter(GeneratedsSuper):
    """A parameter that varies across a segment group, carrying an
    optional ``inhomogeneousValue`` child element and the
    ``segmentGroup`` / ``parameter`` XML attributes."""
    subclass = None
    superclass = None
    # XML attributes handled by this element, in schema order.
    _attr_names = ('segmentGroup', 'parameter')
    def __init__(self, segmentGroup=None, parameter=None, inhomogeneousValue=None):
        self.segmentGroup = _cast(None, segmentGroup)
        self.parameter = _cast(None, parameter)
        self.inhomogeneousValue = inhomogeneousValue
    def factory(*args_, **kwargs_):
        # Honour subclass substitution when one has been registered.
        target = VariableParameter.subclass or VariableParameter
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_inhomogeneousValue(self):
        return self.inhomogeneousValue
    def set_inhomogeneousValue(self, inhomogeneousValue):
        self.inhomogeneousValue = inhomogeneousValue
    def get_segmentGroup(self):
        return self.segmentGroup
    def set_segmentGroup(self, segmentGroup):
        self.segmentGroup = segmentGroup
    def get_parameter(self):
        return self.parameter
    def set_parameter(self, parameter):
        self.parameter = parameter
    def export(self, outfile, level, namespace_='', name_='VariableParameter', namespacedef_=''):
        """Serialize this element as XML; self-closing when empty."""
        showIndent(outfile, level)
        extra = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='VariableParameter')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='VariableParameter'):
        """Write each attribute at most once, honouring already_processed."""
        for attr in self._attr_names:
            val = getattr(self, attr)
            if val is None or attr in already_processed:
                continue
            already_processed.append(attr)
            formatted = self.gds_format_string(quote_attrib(val).encode(ExternalEncoding), input_name=attr)
            outfile.write(' %s=%s' % (attr, formatted))
    def exportChildren(self, outfile, level, namespace_='', name_='VariableParameter', fromsubclass_=False):
        """Export the single optional child element, when present."""
        if self.inhomogeneousValue is None:
            return
        self.inhomogeneousValue.export(outfile, level, namespace_, name_='inhomogeneousValue')
    def hasContent_(self):
        """True when the inhomogeneousValue child is present."""
        return self.inhomogeneousValue is not None
    def exportLiteral(self, outfile, level, name_='VariableParameter'):
        """Write this element as Python constructor-literal text."""
        inner = level + 1
        self.exportLiteralAttributes(outfile, inner, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, inner, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for attr in self._attr_names:
            val = getattr(self, attr)
            if val is None or attr in already_processed:
                continue
            already_processed.append(attr)
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (attr, val))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.inhomogeneousValue is None:
            return
        showIndent(outfile, level)
        outfile.write('inhomogeneousValue=model_.InhomogeneousValue(\n')
        self.inhomogeneousValue.exportLiteral(outfile, level, name_='inhomogeneousValue')
        showIndent(outfile, level)
        outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        for attr in self._attr_names:
            val = find_attr_value_(attr, node)
            if val is not None and attr not in already_processed:
                already_processed.append(attr)
                setattr(self, attr, val)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'inhomogeneousValue':
            child_obj = InhomogeneousValue.factory()
            child_obj.build(child_)
            self.set_inhomogeneousValue(child_obj)
# end class VariableParameter
class InhomogeneousValue(GeneratedsSuper):
    """Value of an inhomogeneous parameter: the ``inhomogeneousParam``
    attribute names the parameter, ``value`` gives its expression."""
    subclass = None
    superclass = None
    # XML attributes handled by this element, in schema order.
    _attr_names = ('inhomogeneousParam', 'value')
    def __init__(self, inhomogeneousParam=None, value=None):
        self.inhomogeneousParam = _cast(None, inhomogeneousParam)
        self.value = _cast(None, value)
    def factory(*args_, **kwargs_):
        # Honour subclass substitution when one has been registered.
        target = InhomogeneousValue.subclass or InhomogeneousValue
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_inhomogeneousParam(self):
        return self.inhomogeneousParam
    def set_inhomogeneousParam(self, inhomogeneousParam):
        self.inhomogeneousParam = inhomogeneousParam
    def get_value(self):
        return self.value
    def set_value(self, value):
        self.value = value
    def export(self, outfile, level, namespace_='', name_='InhomogeneousValue', namespacedef_=''):
        """Serialize this element as XML; always self-closing since it
        never has child content."""
        showIndent(outfile, level)
        extra = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='InhomogeneousValue')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='InhomogeneousValue'):
        """Write each attribute at most once, honouring already_processed."""
        for attr in self._attr_names:
            val = getattr(self, attr)
            if val is None or attr in already_processed:
                continue
            already_processed.append(attr)
            formatted = self.gds_format_string(quote_attrib(val).encode(ExternalEncoding), input_name=attr)
            outfile.write(' %s=%s' % (attr, formatted))
    def exportChildren(self, outfile, level, namespace_='', name_='InhomogeneousValue', fromsubclass_=False):
        """No child elements for this type."""
    def hasContent_(self):
        # Never any child content (the generated empty-tuple condition
        # was always falsy; equivalent and explicit here).
        return False
    def exportLiteral(self, outfile, level, name_='InhomogeneousValue'):
        """Write this element as Python constructor-literal text."""
        inner = level + 1
        self.exportLiteralAttributes(outfile, inner, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, inner, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for attr in self._attr_names:
            val = getattr(self, attr)
            if val is None or attr in already_processed:
                continue
            already_processed.append(attr)
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (attr, val))
    def exportLiteralChildren(self, outfile, level, name_):
        """Nothing to emit: no children."""
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for element in node:
            tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        for attr in self._attr_names:
            val = find_attr_value_(attr, node)
            if val is not None and attr not in already_processed:
                already_processed.append(attr)
                setattr(self, attr, val)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No children to build."""
# end class InhomogeneousValue
class ReversalPotential(ValueAcrossSegOrSegGroup):
    """Reversal potential for an ion species across a segment or segment
    group; extends ValueAcrossSegOrSegGroup with a ``species`` attribute.
    """
    subclass = None
    superclass = ValueAcrossSegOrSegGroup
    def __init__(self, segment=None, segmentGroup='all', value=None, species=None):
        super(ReversalPotential, self).__init__(segment, segmentGroup, value, )
        self.species = _cast(None, species)
        pass
    def factory(*args_, **kwargs_):
        # Create the registered subclass when one is installed, else this class.
        if ReversalPotential.subclass:
            return ReversalPotential.subclass(*args_, **kwargs_)
        else:
            return ReversalPotential(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_species(self): return self.species
    def set_species(self, species): self.species = species
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='ReversalPotential', namespacedef_=''):
        # Serialize as XML; self-closing when hasContent_() is False.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReversalPotential')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ReversalPotential'):
        # Base-class attributes (segment/segmentGroup/value) first, then species.
        super(ReversalPotential, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ReversalPotential')
        if self.species is not None and 'species' not in already_processed:
            already_processed.append('species')
            outfile.write(' species=%s' % (quote_attrib(self.species), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ReversalPotential', fromsubclass_=False):
        # Only inherited (empty) child handling.
        super(ReversalPotential, self).exportChildren(outfile, level, namespace_, name_, True)
        pass
    def hasContent_(self):
        # Content can only come from the base class (which has none).
        if (
            super(ReversalPotential, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ReversalPotential'):
        # Write this element as Python constructor-literal text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.species is not None and 'species' not in already_processed:
            already_processed.append('species')
            showIndent(outfile, level)
            outfile.write('species = "%s",\n' % (self.species,))
        super(ReversalPotential, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ReversalPotential, self).exportLiteralChildren(outfile, level, name_)
        pass
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('species', node)
        if value is not None and 'species' not in already_processed:
            already_processed.append('species')
            self.species = value
            self.validate_NmlId(self.species) # validate type NmlId
        super(ReversalPotential, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(ReversalPotential, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class ReversalPotential
class Species(ValueAcrossSegOrSegGroup):
    """Specifying the ion here again is redundant, the ion name should be
    the same as id. Kept for now until LEMS implementation can
    select by id. TODO: remove."""
    subclass = None
    superclass = ValueAcrossSegOrSegGroup
    def __init__(self, segment=None, segmentGroup='all', value=None, ion=None, id=None, decayingPoolConcentrationModel=None, concentrationModel=None, fixedConcentration=None):
        super(Species, self).__init__(segment, segmentGroup, value, )
        self.ion = _cast(None, ion)
        self.id = _cast(None, id)
        # Three alternative concentration-model children; presumably at
        # most one is used per instance — TODO confirm against the schema.
        self.decayingPoolConcentrationModel = decayingPoolConcentrationModel
        self.concentrationModel = concentrationModel
        self.fixedConcentration = fixedConcentration
    def factory(*args_, **kwargs_):
        # Create the registered subclass when one is installed, else this class.
        if Species.subclass:
            return Species.subclass(*args_, **kwargs_)
        else:
            return Species(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_decayingPoolConcentrationModel(self): return self.decayingPoolConcentrationModel
    def set_decayingPoolConcentrationModel(self, decayingPoolConcentrationModel): self.decayingPoolConcentrationModel = decayingPoolConcentrationModel
    def get_concentrationModel(self): return self.concentrationModel
    def set_concentrationModel(self, concentrationModel): self.concentrationModel = concentrationModel
    def get_fixedConcentration(self): return self.fixedConcentration
    def set_fixedConcentration(self, fixedConcentration): self.fixedConcentration = fixedConcentration
    def get_ion(self): return self.ion
    def set_ion(self, ion): self.ion = ion
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def export(self, outfile, level, namespace_='', name_='Species', namespacedef_=''):
        # Serialize as XML; self-closing when hasContent_() is False.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Species')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Species'):
        # Base-class attributes first, then ion and id.
        super(Species, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Species')
        if self.ion is not None and 'ion' not in already_processed:
            already_processed.append('ion')
            outfile.write(' ion=%s' % (quote_attrib(self.ion), ))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Species', fromsubclass_=False):
        super(Species, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.decayingPoolConcentrationModel is not None:
            self.decayingPoolConcentrationModel.export(outfile, level, namespace_, name_='decayingPoolConcentrationModel', )
        if self.concentrationModel is not None:
            self.concentrationModel.export(outfile, level, namespace_, name_='concentrationModel', )
        if self.fixedConcentration is not None:
            self.fixedConcentration.export(outfile, level, namespace_, name_='fixedConcentration', )
    def hasContent_(self):
        # True when any concentration-model child is set (the base class
        # itself contributes no content).
        if (
            self.decayingPoolConcentrationModel is not None or
            self.concentrationModel is not None or
            self.fixedConcentration is not None or
            super(Species, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Species'):
        # Write this element as Python constructor-literal text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.ion is not None and 'ion' not in already_processed:
            already_processed.append('ion')
            showIndent(outfile, level)
            outfile.write('ion = "%s",\n' % (self.ion,))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = "%s",\n' % (self.id,))
        super(Species, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Species, self).exportLiteralChildren(outfile, level, name_)
        if self.decayingPoolConcentrationModel is not None:
            showIndent(outfile, level)
            outfile.write('decayingPoolConcentrationModel=model_.DecayingPoolConcentrationModel(\n')
            self.decayingPoolConcentrationModel.exportLiteral(outfile, level, name_='decayingPoolConcentrationModel')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.concentrationModel is not None:
            showIndent(outfile, level)
            outfile.write('concentrationModel=model_.ConcentrationModel_D(\n')
            self.concentrationModel.exportLiteral(outfile, level, name_='concentrationModel')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.fixedConcentration is not None:
            showIndent(outfile, level)
            outfile.write('fixedConcentration=model_.FixedConcentration(\n')
            self.fixedConcentration.exportLiteral(outfile, level, name_='fixedConcentration')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('ion', node)
        if value is not None and 'ion' not in already_processed:
            already_processed.append('ion')
            self.ion = value
            self.validate_NmlId(self.ion) # validate type NmlId
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            self.id = value
            self.validate_NmlId(self.id) # validate type NmlId
        super(Species, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'decayingPoolConcentrationModel':
            # get_class_obj_ presumably resolves an xsi:type override on
            # the element — TODO confirm against GeneratedsSuper.
            class_obj_ = self.get_class_obj_(child_, DecayingPoolConcentrationModel)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_decayingPoolConcentrationModel(obj_)
        elif nodeName_ == 'concentrationModel':
            obj_ = ConcentrationModel_D.factory()
            obj_.build(child_)
            self.set_concentrationModel(obj_)
        elif nodeName_ == 'fixedConcentration':
            obj_ = FixedConcentration.factory()
            obj_.build(child_)
            self.set_fixedConcentration(obj_)
        super(Species, self).buildChildren(child_, node, nodeName_, True)
# end class Species
class FixedConcentration(GeneratedsSuper):
    """A fixed (clamped) ion concentration.

    Single XML attribute ``concentration`` (an Nml2Quantity_concentration
    string); no child elements.
    """
    subclass = None
    superclass = None
    def __init__(self, concentration=None):
        self.concentration = _cast(None, concentration)
    def factory(*args_, **kwargs_):
        # Create the registered subclass when one is installed, else this class.
        if FixedConcentration.subclass:
            return FixedConcentration.subclass(*args_, **kwargs_)
        else:
            return FixedConcentration(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_concentration(self): return self.concentration
    def set_concentration(self, concentration): self.concentration = concentration
    def validate_Nml2Quantity_concentration(self, value):
        # Validate type Nml2Quantity_concentration, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='FixedConcentration', namespacedef_=''):
        """Serialize this element as XML; always self-closing since
        hasContent_() is always False."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='FixedConcentration')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='FixedConcentration'):
        """Write the ``concentration`` attribute, at most once."""
        if self.concentration is not None and 'concentration' not in already_processed:
            already_processed.append('concentration')
            outfile.write(' concentration=%s' % (quote_attrib(self.concentration), ))
    def exportChildren(self, outfile, level, namespace_='', name_='FixedConcentration', fromsubclass_=False):
        # No child elements for this type.
        pass
    def hasContent_(self):
        # FixedConcentration never has child content.  (The generated
        # 'if ():' tested an empty tuple — always falsy — and is made
        # explicit here.)
        return False
    def exportLiteral(self, outfile, level, name_='FixedConcentration'):
        """Write this element as Python constructor-literal text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.concentration is not None and 'concentration' not in already_processed:
            already_processed.append('concentration')
            showIndent(outfile, level)
            outfile.write('concentration = "%s",\n' % (self.concentration,))
    def exportLiteralChildren(self, outfile, level, name_):
        # No children to emit.
        pass
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read and validate the ``concentration`` attribute."""
        value = find_attr_value_('concentration', node)
        if value is not None and 'concentration' not in already_processed:
            already_processed.append('concentration')
            self.concentration = value
            self.validate_Nml2Quantity_concentration(self.concentration)  # validate type Nml2Quantity_concentration
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No children to build.
        pass
# end class FixedConcentration
class DecayingPoolConcentrationModel(GeneratedsSuper):
    """Generated binding for the decayingPoolConcentrationModel element.

    Holds five string-valued quantity attributes (concentrations, a time
    constant and a length) plus an optional xsi:type extension marker
    (extensiontype_) used for schema substitution.
    """
    subclass = None
    superclass = None
    def __init__(self, extConcentration=None, resting_conc=None, decay_constant=None, shell_thickness=None, initialConcentration=None, extensiontype_=None):
        self.extConcentration = _cast(None, extConcentration)
        self.resting_conc = _cast(None, resting_conc)
        self.decay_constant = _cast(None, decay_constant)
        self.shell_thickness = _cast(None, shell_thickness)
        self.initialConcentration = _cast(None, initialConcentration)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = DecayingPoolConcentrationModel.subclass or DecayingPoolConcentrationModel
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_extConcentration(self):
        return self.extConcentration
    def set_extConcentration(self, extConcentration):
        self.extConcentration = extConcentration
    def validate_Nml2Quantity_concentration(self, value):
        # Validate type Nml2Quantity_concentration, a restriction on xs:string.
        pass
    def get_resting_conc(self):
        return self.resting_conc
    def set_resting_conc(self, resting_conc):
        self.resting_conc = resting_conc
    def get_decay_constant(self):
        return self.decay_constant
    def set_decay_constant(self, decay_constant):
        self.decay_constant = decay_constant
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_shell_thickness(self):
        return self.shell_thickness
    def set_shell_thickness(self, shell_thickness):
        self.shell_thickness = shell_thickness
    def validate_Nml2Quantity_length(self, value):
        # Validate type Nml2Quantity_length, a restriction on xs:string.
        pass
    def get_initialConcentration(self):
        return self.initialConcentration
    def set_initialConcentration(self, initialConcentration):
        self.initialConcentration = initialConcentration
    def get_extensiontype_(self):
        return self.extensiontype_
    def set_extensiontype_(self, extensiontype_):
        self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='DecayingPoolConcentrationModel', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DecayingPoolConcentrationModel')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='DecayingPoolConcentrationModel'):
        # Emit each plain attribute once, in schema order.
        for attr_name in ('extConcentration', 'resting_conc', 'decay_constant', 'shell_thickness', 'initialConcentration'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='DecayingPoolConcentrationModel', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements or text content are defined for this type.
        return False
    def exportLiteral(self, outfile, level, name_='DecayingPoolConcentrationModel'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # extensiontype_ is intentionally not part of the literal output.
        for attr_name in ('extConcentration', 'resting_conc', 'decay_constant', 'shell_thickness', 'initialConcentration'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                showIndent(outfile, level)
                outfile.write('%s = "%s",\n' % (attr_name, attr_value))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # (attribute name, validator) pairs, in schema order.
        attr_specs = (
            ('extConcentration', self.validate_Nml2Quantity_concentration),
            ('resting_conc', self.validate_Nml2Quantity_concentration),
            ('decay_constant', self.validate_Nml2Quantity_time),
            ('shell_thickness', self.validate_Nml2Quantity_length),
            ('initialConcentration', self.validate_Nml2Quantity_concentration),
        )
        for attr_name, validator in attr_specs:
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                setattr(self, attr_name, value)
                validator(value)
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class DecayingPoolConcentrationModel
class ConcentrationModel_D(DecayingPoolConcentrationModel):
    """Extension of DecayingPoolConcentrationModel that adds a 'type'
    attribute (stored as type_ to avoid shadowing the builtin).
    """
    subclass = None
    superclass = DecayingPoolConcentrationModel
    def __init__(self, extConcentration=None, resting_conc=None, decay_constant=None, shell_thickness=None, initialConcentration=None, type_=None):
        super(ConcentrationModel_D, self).__init__(extConcentration, resting_conc, decay_constant, shell_thickness, initialConcentration, )
        self.type_ = _cast(None, type_)
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = ConcentrationModel_D.subclass or ConcentrationModel_D
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_type(self):
        return self.type_
    def set_type(self, type_):
        self.type_ = type_
    def export(self, outfile, level, namespace_='', name_='ConcentrationModel_D', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConcentrationModel_D')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ConcentrationModel_D'):
        # Inherited attributes first, then the local 'type' attribute.
        super(ConcentrationModel_D, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ConcentrationModel_D')
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ConcentrationModel_D', fromsubclass_=False):
        super(ConcentrationModel_D, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        # Content is whatever the base class contributes (currently nothing).
        return bool(super(ConcentrationModel_D, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='ConcentrationModel_D'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # Local 'type_' first, then the inherited attributes.
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            showIndent(outfile, level)
            outfile.write('type_ = "%s",\n' % (self.type_,))
        super(ConcentrationModel_D, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ConcentrationModel_D, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.append('type')
            self.type_ = value
        super(ConcentrationModel_D, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(ConcentrationModel_D, self).buildChildren(child_, node, nodeName_, True)
# end class ConcentrationModel_D
class IntracellularProperties(GeneratedsSuper):
    """Generated binding for intracellularProperties.

    Container for two repeatable child element lists: species, and
    resistivity (ValueAcrossSegOrSegGroup, possibly xsi:type-substituted).
    """
    subclass = None
    superclass = None
    def __init__(self, species=None, resistivity=None):
        # Build fresh lists so instances never share a default container.
        self.species = [] if species is None else species
        self.resistivity = [] if resistivity is None else resistivity
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = IntracellularProperties.subclass or IntracellularProperties
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_species(self):
        return self.species
    def set_species(self, species):
        self.species = species
    def add_species(self, value):
        self.species.append(value)
    def insert_species(self, index, value):
        self.species[index] = value
    def get_resistivity(self):
        return self.resistivity
    def set_resistivity(self, resistivity):
        self.resistivity = resistivity
    def add_resistivity(self, value):
        self.resistivity.append(value)
    def insert_resistivity(self, index, value):
        self.resistivity[index] = value
    def export(self, outfile, level, namespace_='', name_='IntracellularProperties', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='IntracellularProperties')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            # Children are multi-line, so the closing tag gets its own indent.
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='IntracellularProperties'):
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='IntracellularProperties', fromsubclass_=False):
        for species_item in self.species:
            species_item.export(outfile, level, namespace_, name_='species')
        for resistivity_item in self.resistivity:
            resistivity_item.export(outfile, level, namespace_, name_='resistivity')
    def hasContent_(self):
        # True when either child list is non-empty.
        return bool(self.species or self.resistivity)
    def exportLiteral(self, outfile, level, name_='IntracellularProperties'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('species=[\n')
        level += 1
        for species_item in self.species:
            showIndent(outfile, level)
            outfile.write('model_.Species(\n')
            species_item.exportLiteral(outfile, level, name_='Species')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('resistivity=[\n')
        level += 1
        for resistivity_item in self.resistivity:
            showIndent(outfile, level)
            outfile.write('model_.ValueAcrossSegOrSegGroup(\n')
            resistivity_item.exportLiteral(outfile, level, name_='ValueAcrossSegOrSegGroup')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'species':
            obj_ = Species.factory()
            obj_.build(child_)
            self.species.append(obj_)
        elif nodeName_ == 'resistivity':
            # resistivity may be xsi:type-substituted; resolve the real class.
            concrete_class = self.get_class_obj_(child_, ValueAcrossSegOrSegGroup)
            obj_ = concrete_class.factory()
            obj_.build(child_)
            self.resistivity.append(obj_)
# end class IntracellularProperties
class SpaceStructure(GeneratedsSuper):
    """Generated binding for a space/grid structure: float start positions
    and spacings along the x, y and z axes. Start positions default to 0;
    spacings default to None (absent).
    """
    subclass = None
    superclass = None
    def __init__(self, ySpacing=None, zStart=0, yStart=0, zSpacing=None, xStart=0, xSpacing=None):
        self.ySpacing = _cast(float, ySpacing)
        self.zStart = _cast(float, zStart)
        self.yStart = _cast(float, yStart)
        self.zSpacing = _cast(float, zSpacing)
        self.xStart = _cast(float, xStart)
        self.xSpacing = _cast(float, xSpacing)
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = SpaceStructure.subclass or SpaceStructure
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_ySpacing(self):
        return self.ySpacing
    def set_ySpacing(self, ySpacing):
        self.ySpacing = ySpacing
    def get_zStart(self):
        return self.zStart
    def set_zStart(self, zStart):
        self.zStart = zStart
    def get_yStart(self):
        return self.yStart
    def set_yStart(self, yStart):
        self.yStart = yStart
    def get_zSpacing(self):
        return self.zSpacing
    def set_zSpacing(self, zSpacing):
        self.zSpacing = zSpacing
    def get_xStart(self):
        return self.xStart
    def set_xStart(self, xStart):
        self.xStart = xStart
    def get_xSpacing(self):
        return self.xSpacing
    def set_xSpacing(self, xSpacing):
        self.xSpacing = xSpacing
    def export(self, outfile, level, namespace_='', name_='SpaceStructure', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SpaceStructure')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SpaceStructure'):
        # Emit each float attribute once, in schema order.
        for attr_name in ('ySpacing', 'zStart', 'yStart', 'zSpacing', 'xStart', 'xSpacing'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                outfile.write(' %s="%s"' % (attr_name, self.gds_format_float(attr_value, input_name=attr_name)))
    def exportChildren(self, outfile, level, namespace_='', name_='SpaceStructure', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements or text content are defined for this type.
        return False
    def exportLiteral(self, outfile, level, name_='SpaceStructure'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for attr_name in ('ySpacing', 'zStart', 'yStart', 'zSpacing', 'xStart', 'xSpacing'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                showIndent(outfile, level)
                outfile.write('%s = %f,\n' % (attr_name, attr_value))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Each attribute is parsed as a float; the error message names the
        # offending attribute, exactly as the unrolled original did.
        for attr_name in ('ySpacing', 'zStart', 'yStart', 'zSpacing', 'xStart', 'xSpacing'):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                try:
                    setattr(self, attr_name, float(value))
                except ValueError as exp:
                    raise ValueError('Bad float/double attribute (%s): %s' % (attr_name, exp))
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class SpaceStructure
class Layout(GeneratedsSuper):
    """Generated binding for layout: an optional 'space' reference (NmlId)
    plus at most one each of the random / grid / unstructured child layouts.
    """
    subclass = None
    superclass = None
    def __init__(self, space=None, random=None, grid=None, unstructured=None):
        self.space = _cast(None, space)
        self.random = random
        self.grid = grid
        self.unstructured = unstructured
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = Layout.subclass or Layout
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_random(self):
        return self.random
    def set_random(self, random):
        self.random = random
    def get_grid(self):
        return self.grid
    def set_grid(self, grid):
        self.grid = grid
    def get_unstructured(self):
        return self.unstructured
    def set_unstructured(self, unstructured):
        self.unstructured = unstructured
    def get_space(self):
        return self.space
    def set_space(self, space):
        self.space = space
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='Layout', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Layout')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            # Children are multi-line, so the closing tag gets its own indent.
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Layout'):
        if self.space is not None and 'space' not in already_processed:
            already_processed.append('space')
            outfile.write(' space=%s' % (quote_attrib(self.space), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Layout', fromsubclass_=False):
        for child_obj, child_name in ((self.random, 'random'), (self.grid, 'grid'), (self.unstructured, 'unstructured')):
            if child_obj is not None:
                child_obj.export(outfile, level, namespace_, name_=child_name, )
    def hasContent_(self):
        # True when any of the three optional children is present.
        return bool(
            self.random is not None or
            self.grid is not None or
            self.unstructured is not None
        )
    def exportLiteral(self, outfile, level, name_='Layout'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.space is not None and 'space' not in already_processed:
            already_processed.append('space')
            showIndent(outfile, level)
            outfile.write('space = "%s",\n' % (self.space,))
    def exportLiteralChildren(self, outfile, level, name_):
        literal_specs = (
            (self.random, 'random', 'random=model_.RandomLayout(\n'),
            (self.grid, 'grid', 'grid=model_.GridLayout(\n'),
            (self.unstructured, 'unstructured', 'unstructured=model_.UnstructuredLayout(\n'),
        )
        for child_obj, child_name, opener in literal_specs:
            if child_obj is not None:
                showIndent(outfile, level)
                outfile.write(opener)
                child_obj.exportLiteral(outfile, level, name_=child_name)
                showIndent(outfile, level)
                outfile.write('),\n')
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('space', node)
        if value is not None and 'space' not in already_processed:
            already_processed.append('space')
            self.space = value
            self.validate_NmlId(self.space) # validate type NmlId
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each known child tag to its class and setter.
        dispatch = {
            'random': (RandomLayout, self.set_random),
            'grid': (GridLayout, self.set_grid),
            'unstructured': (UnstructuredLayout, self.set_unstructured),
        }
        if nodeName_ in dispatch:
            child_class, setter = dispatch[nodeName_]
            obj_ = child_class.factory()
            obj_.build(child_)
            setter(obj_)
# end class Layout
class UnstructuredLayout(GeneratedsSuper):
    """Generated binding for unstructuredLayout: a single non-negative
    integer 'number' attribute and no child elements.
    """
    subclass = None
    superclass = None
    def __init__(self, number=None):
        self.number = _cast(int, number)
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = UnstructuredLayout.subclass or UnstructuredLayout
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_number(self):
        return self.number
    def set_number(self, number):
        self.number = number
    def export(self, outfile, level, namespace_='', name_='UnstructuredLayout', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='UnstructuredLayout')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='UnstructuredLayout'):
        if self.number is not None and 'number' not in already_processed:
            already_processed.append('number')
            outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number'))
    def exportChildren(self, outfile, level, namespace_='', name_='UnstructuredLayout', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements or text content are defined for this type.
        return False
    def exportLiteral(self, outfile, level, name_='UnstructuredLayout'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.number is not None and 'number' not in already_processed:
            already_processed.append('number')
            showIndent(outfile, level)
            outfile.write('number = %d,\n' % (self.number,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('number', node)
        if value is not None and 'number' not in already_processed:
            already_processed.append('number')
            try:
                self.number = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            # Schema type is xs:nonNegativeInteger.
            if self.number < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class UnstructuredLayout
class RandomLayout(GeneratedsSuper):
    """Generated binding for randomLayout: a 'region' reference (NmlId)
    and a non-negative integer 'number' attribute; no child elements.
    """
    subclass = None
    superclass = None
    def __init__(self, region=None, number=None):
        self.region = _cast(None, region)
        self.number = _cast(int, number)
    def factory(*args_, **kwargs_):
        # Honour a registered subclass, if one has been installed.
        cls = RandomLayout.subclass or RandomLayout
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_region(self):
        return self.region
    def set_region(self, region):
        self.region = region
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def get_number(self):
        return self.number
    def set_number(self, number):
        self.number = number
    def export(self, outfile, level, namespace_='', name_='RandomLayout', namespacedef_=''):
        """Write this object as an XML element; self-close when empty."""
        showIndent(outfile, level)
        namespacedef_part = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_part))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RandomLayout')
        if not self.hasContent_():
            outfile.write('/>\n')
        else:
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='RandomLayout'):
        if self.region is not None and 'region' not in already_processed:
            already_processed.append('region')
            outfile.write(' region=%s' % (quote_attrib(self.region), ))
        if self.number is not None and 'number' not in already_processed:
            already_processed.append('number')
            outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number'))
    def exportChildren(self, outfile, level, namespace_='', name_='RandomLayout', fromsubclass_=False):
        pass
    def hasContent_(self):
        # No child elements or text content are defined for this type.
        return False
    def exportLiteral(self, outfile, level, name_='RandomLayout'):
        """Write this object as Python constructor-literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.region is not None and 'region' not in already_processed:
            already_processed.append('region')
            showIndent(outfile, level)
            outfile.write('region = "%s",\n' % (self.region,))
        if self.number is not None and 'number' not in already_processed:
            already_processed.append('number')
            showIndent(outfile, level)
            outfile.write('number = %d,\n' % (self.number,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        """Populate this object from a parsed XML element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('region', node)
        if value is not None and 'region' not in already_processed:
            already_processed.append('region')
            self.region = value
            self.validate_NmlId(self.region) # validate type NmlId
        value = find_attr_value_('number', node)
        if value is not None and 'number' not in already_processed:
            already_processed.append('number')
            try:
                self.number = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            # Schema type is xs:nonNegativeInteger.
            if self.number < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class RandomLayout
class GridLayout(GeneratedsSuper):
    """Generated binding for the GridLayout element: a 3-D grid of cell
    positions described by non-negative xSize/ySize/zSize counts."""
    subclass = None
    superclass = None
    def __init__(self, zSize=None, ySize=None, xSize=None):
        self.zSize = _cast(int, zSize)
        self.ySize = _cast(int, ySize)
        self.xSize = _cast(int, xSize)
    def factory(*args_, **kwargs_):
        """Build a GridLayout (or its registered subclass, if any)."""
        if GridLayout.subclass:
            return GridLayout.subclass(*args_, **kwargs_)
        else:
            return GridLayout(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_zSize(self): return self.zSize
    def set_zSize(self, zSize): self.zSize = zSize
    def get_ySize(self): return self.ySize
    def set_ySize(self, ySize): self.ySize = ySize
    def get_xSize(self): return self.xSize
    def set_xSize(self, xSize): self.xSize = xSize
    def export(self, outfile, level, namespace_='', name_='GridLayout', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='GridLayout')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='GridLayout'):
        """Write the zSize/ySize/xSize attributes that are set."""
        if self.zSize is not None and 'zSize' not in already_processed:
            already_processed.append('zSize')
            outfile.write(' zSize="%s"' % self.gds_format_integer(self.zSize, input_name='zSize'))
        if self.ySize is not None and 'ySize' not in already_processed:
            already_processed.append('ySize')
            outfile.write(' ySize="%s"' % self.gds_format_integer(self.ySize, input_name='ySize'))
        if self.xSize is not None and 'xSize' not in already_processed:
            already_processed.append('xSize')
            outfile.write(' xSize="%s"' % self.gds_format_integer(self.xSize, input_name='xSize'))
    def exportChildren(self, outfile, level, namespace_='', name_='GridLayout', fromsubclass_=False):
        """GridLayout has no child elements; nothing is written."""
        pass
    def hasContent_(self):
        """GridLayout never has child content.  (The generated original
        tested an always-falsy empty tuple `()`.)"""
        return False
    def exportLiteral(self, outfile, level, name_='GridLayout'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the zSize/ySize/xSize attributes in literal form."""
        if self.zSize is not None and 'zSize' not in already_processed:
            already_processed.append('zSize')
            showIndent(outfile, level)
            outfile.write('zSize = %d,\n' % (self.zSize,))
        if self.ySize is not None and 'ySize' not in already_processed:
            already_processed.append('ySize')
            showIndent(outfile, level)
            outfile.write('ySize = %d,\n' % (self.ySize,))
        if self.xSize is not None and 'xSize' not in already_processed:
            already_processed.append('xSize')
            showIndent(outfile, level)
            outfile.write('xSize = %d,\n' % (self.xSize,))
    def exportLiteralChildren(self, outfile, level, name_):
        """GridLayout has no children to render in literal form."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse zSize/ySize/xSize as non-negative integer attributes."""
        value = find_attr_value_('zSize', node)
        if value is not None and 'zSize' not in already_processed:
            already_processed.append('zSize')
            try:
                self.zSize = int(value)
            except ValueError as exp:  # 'as' form works on Python 2.6+ and 3.x
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.zSize < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('ySize', node)
        if value is not None and 'ySize' not in already_processed:
            already_processed.append('ySize')
            try:
                self.ySize = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.ySize < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('xSize', node)
        if value is not None and 'xSize' not in already_processed:
            already_processed.append('xSize')
            try:
                self.xSize = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.xSize < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """GridLayout defines no child elements; ignore any given."""
        pass
# end class GridLayout
class Instances(GeneratedsSuper):
    """Generated binding for the <instances> container element: an
    explicit list of <instance> children plus an optional 'size' count."""
    subclass = None
    superclass = None
    def __init__(self, size=None, instance=None):
        self.size = _cast(int, size)
        # Avoid a shared mutable default: each object gets its own list.
        if instance is None:
            self.instance = []
        else:
            self.instance = instance
    def factory(*args_, **kwargs_):
        """Build an Instances (or its registered subclass, if any)."""
        if Instances.subclass:
            return Instances.subclass(*args_, **kwargs_)
        else:
            return Instances(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_instance(self): return self.instance
    def set_instance(self, instance): self.instance = instance
    def add_instance(self, value): self.instance.append(value)
    # NOTE(review): despite its name this *overwrites* the element at
    # *index* rather than inserting; kept as-is for caller compatibility.
    def insert_instance(self, index, value): self.instance[index] = value
    def get_size(self): return self.size
    def set_size(self, size): self.size = size
    def export(self, outfile, level, namespace_='', name_='Instances', namespacedef_=''):
        """Serialize this element and its children as XML to *outfile*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Instances')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Instances'):
        """Write the 'size' attribute, if set."""
        if self.size is not None and 'size' not in already_processed:
            already_processed.append('size')
            outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
    def exportChildren(self, outfile, level, namespace_='', name_='Instances', fromsubclass_=False):
        """Export every contained <instance> child element."""
        for instance_ in self.instance:
            instance_.export(outfile, level, namespace_, name_='instance')
    def hasContent_(self):
        """True when at least one <instance> child is present."""
        return bool(self.instance)
    def exportLiteral(self, outfile, level, name_='Instances'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the 'size' attribute in Python-literal form."""
        if self.size is not None and 'size' not in already_processed:
            already_processed.append('size')
            showIndent(outfile, level)
            outfile.write('size = %d,\n' % (self.size,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the instance list in Python-literal form."""
        showIndent(outfile, level)
        outfile.write('instance=[\n')
        level += 1
        for instance_ in self.instance:
            showIndent(outfile, level)
            outfile.write('model_.Instance(\n')
            instance_.exportLiteral(outfile, level, name_='Instance')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse 'size' as a non-negative integer attribute."""
        value = find_attr_value_('size', node)
        if value is not None and 'size' not in already_processed:
            already_processed.append('size')
            try:
                self.size = int(value)
            except ValueError as exp:  # 'as' form works on Python 2.6+ and 3.x
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.size < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Collect <instance> children into self.instance."""
        if nodeName_ == 'instance':
            obj_ = Instance.factory()
            obj_.build(child_)
            self.instance.append(obj_)
# end class Instances
class Instance(GeneratedsSuper):
    """Generated binding for the <instance> element: a single placed cell
    with grid indices i/j/k, an id, and an optional <location> child."""
    subclass = None
    superclass = None
    def __init__(self, i=None, k=None, j=None, id=None, location=None):
        self.i = _cast(int, i)
        self.k = _cast(int, k)
        self.j = _cast(int, j)
        self.id = _cast(int, id)
        self.location = location
    def factory(*args_, **kwargs_):
        """Build an Instance (or its registered subclass, if any)."""
        if Instance.subclass:
            return Instance.subclass(*args_, **kwargs_)
        else:
            return Instance(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_location(self): return self.location
    def set_location(self, location): self.location = location
    def get_i(self): return self.i
    def set_i(self, i): self.i = i
    def get_k(self): return self.k
    def set_k(self, k): self.k = k
    def get_j(self): return self.j
    def set_j(self, j): self.j = j
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def export(self, outfile, level, namespace_='', name_='Instance', namespacedef_=''):
        """Serialize this element and its children as XML to *outfile*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Instance')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Instance'):
        """Write the i/k/j/id attributes that are set."""
        if self.i is not None and 'i' not in already_processed:
            already_processed.append('i')
            outfile.write(' i="%s"' % self.gds_format_integer(self.i, input_name='i'))
        if self.k is not None and 'k' not in already_processed:
            already_processed.append('k')
            outfile.write(' k="%s"' % self.gds_format_integer(self.k, input_name='k'))
        if self.j is not None and 'j' not in already_processed:
            already_processed.append('j')
            outfile.write(' j="%s"' % self.gds_format_integer(self.j, input_name='j'))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id'))
    def exportChildren(self, outfile, level, namespace_='', name_='Instance', fromsubclass_=False):
        """Export the optional <location> child element."""
        if self.location is not None:
            self.location.export(outfile, level, namespace_, name_='location', )
    def hasContent_(self):
        """True when the optional <location> child is present."""
        return self.location is not None
    def exportLiteral(self, outfile, level, name_='Instance'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the i/k/j/id attributes in Python-literal form."""
        if self.i is not None and 'i' not in already_processed:
            already_processed.append('i')
            showIndent(outfile, level)
            outfile.write('i = %d,\n' % (self.i,))
        if self.k is not None and 'k' not in already_processed:
            already_processed.append('k')
            showIndent(outfile, level)
            outfile.write('k = %d,\n' % (self.k,))
        if self.j is not None and 'j' not in already_processed:
            already_processed.append('j')
            showIndent(outfile, level)
            outfile.write('j = %d,\n' % (self.j,))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = %d,\n' % (self.id,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the optional <location> child in Python-literal form."""
        if self.location is not None:
            showIndent(outfile, level)
            outfile.write('location=model_.Location(\n')
            self.location.exportLiteral(outfile, level, name_='location')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse i/k/j/id as non-negative integer attributes."""
        value = find_attr_value_('i', node)
        if value is not None and 'i' not in already_processed:
            already_processed.append('i')
            try:
                self.i = int(value)
            except ValueError as exp:  # 'as' form works on Python 2.6+ and 3.x
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.i < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('k', node)
        if value is not None and 'k' not in already_processed:
            already_processed.append('k')
            try:
                self.k = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.k < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('j', node)
        if value is not None and 'j' not in already_processed:
            already_processed.append('j')
            try:
                self.j = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.j < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            try:
                self.id = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
            if self.id < 0:
                raise_parse_error(node, 'Invalid NonNegativeInteger')
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Attach a parsed <location> child, if present."""
        if nodeName_ == 'location':
            obj_ = Location.factory()
            obj_.build(child_)
            self.set_location(obj_)
# end class Instance
class Location(GeneratedsSuper):
    """Generated binding for the <location> element: an x/y/z point
    expressed as floating-point attributes."""
    subclass = None
    superclass = None
    def __init__(self, y=None, x=None, z=None):
        self.y = _cast(float, y)
        self.x = _cast(float, x)
        self.z = _cast(float, z)
    def factory(*args_, **kwargs_):
        """Build a Location (or its registered subclass, if any)."""
        if Location.subclass:
            return Location.subclass(*args_, **kwargs_)
        else:
            return Location(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_y(self): return self.y
    def set_y(self, y): self.y = y
    def get_x(self): return self.x
    def set_x(self, x): self.x = x
    def get_z(self): return self.z
    def set_z(self, z): self.z = z
    def export(self, outfile, level, namespace_='', name_='Location', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Location')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Location'):
        """Write the y/x/z attributes that are set."""
        if self.y is not None and 'y' not in already_processed:
            already_processed.append('y')
            outfile.write(' y="%s"' % self.gds_format_float(self.y, input_name='y'))
        if self.x is not None and 'x' not in already_processed:
            already_processed.append('x')
            outfile.write(' x="%s"' % self.gds_format_float(self.x, input_name='x'))
        if self.z is not None and 'z' not in already_processed:
            already_processed.append('z')
            outfile.write(' z="%s"' % self.gds_format_float(self.z, input_name='z'))
    def exportChildren(self, outfile, level, namespace_='', name_='Location', fromsubclass_=False):
        """Location has no child elements; nothing is written."""
        pass
    def hasContent_(self):
        """Location never has child content.  (The generated original
        tested an always-falsy empty tuple `()`.)"""
        return False
    def exportLiteral(self, outfile, level, name_='Location'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the y/x/z attributes in Python-literal form."""
        if self.y is not None and 'y' not in already_processed:
            already_processed.append('y')
            showIndent(outfile, level)
            outfile.write('y = %f,\n' % (self.y,))
        if self.x is not None and 'x' not in already_processed:
            already_processed.append('x')
            showIndent(outfile, level)
            outfile.write('x = %f,\n' % (self.x,))
        if self.z is not None and 'z' not in already_processed:
            already_processed.append('z')
            showIndent(outfile, level)
            outfile.write('z = %f,\n' % (self.z,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Location has no children to render in literal form."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse y/x/z as float attributes.

        NOTE(review): a malformed float raises ValueError directly (the
        generated code does not route this through raise_parse_error as
        the integer paths do); preserved so callers' except clauses still
        match.
        """
        value = find_attr_value_('y', node)
        if value is not None and 'y' not in already_processed:
            already_processed.append('y')
            try:
                self.y = float(value)
            except ValueError as exp:  # 'as' form works on Python 2.6+ and 3.x
                raise ValueError('Bad float/double attribute (y): %s' % exp)
        value = find_attr_value_('x', node)
        if value is not None and 'x' not in already_processed:
            already_processed.append('x')
            try:
                self.x = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (x): %s' % exp)
        value = find_attr_value_('z', node)
        if value is not None and 'z' not in already_processed:
            already_processed.append('z')
            try:
                self.z = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (z): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Location defines no child elements; ignore any given."""
        pass
# end class Location
class SynapticConnection(GeneratedsSuper):
    """Generated binding for the <synapticConnection> element, linking a
    'from' cell to a 'to' cell via a named synapse.  The XML attribute
    'from' is stored as 'fromxx' because 'from' is a Python keyword."""
    subclass = None
    superclass = None
    def __init__(self, to=None, synapse=None, fromxx=None):
        self.to = _cast(None, to)
        self.synapse = _cast(None, synapse)
        self.fromxx = _cast(None, fromxx)
    def factory(*args_, **kwargs_):
        """Build a SynapticConnection (or its registered subclass)."""
        if SynapticConnection.subclass:
            return SynapticConnection.subclass(*args_, **kwargs_)
        else:
            return SynapticConnection(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_to(self): return self.to
    def set_to(self, to): self.to = to
    def get_synapse(self): return self.synapse
    def set_synapse(self, synapse): self.synapse = synapse
    def get_from(self): return self.fromxx
    def set_from(self, fromxx): self.fromxx = fromxx
    def export(self, outfile, level, namespace_='', name_='SynapticConnection', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SynapticConnection')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SynapticConnection'):
        """Write the to/synapse/from attributes; fromxx is emitted under
        its real XML name 'from'."""
        if self.to is not None and 'to' not in already_processed:
            already_processed.append('to')
            outfile.write(' to=%s' % (self.gds_format_string(quote_attrib(self.to).encode(ExternalEncoding), input_name='to'), ))
        if self.synapse is not None and 'synapse' not in already_processed:
            already_processed.append('synapse')
            outfile.write(' synapse=%s' % (self.gds_format_string(quote_attrib(self.synapse).encode(ExternalEncoding), input_name='synapse'), ))
        if self.fromxx is not None and 'fromxx' not in already_processed:
            already_processed.append('fromxx')
            outfile.write(' from=%s' % (self.gds_format_string(quote_attrib(self.fromxx).encode(ExternalEncoding), input_name='from'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='SynapticConnection', fromsubclass_=False):
        """SynapticConnection has no child elements; nothing is written."""
        pass
    def hasContent_(self):
        """SynapticConnection never has child content.  (The generated
        original tested an always-falsy empty tuple `()`.)"""
        return False
    def exportLiteral(self, outfile, level, name_='SynapticConnection'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the to/synapse/fromxx attributes in literal form."""
        if self.to is not None and 'to' not in already_processed:
            already_processed.append('to')
            showIndent(outfile, level)
            outfile.write('to = "%s",\n' % (self.to,))
        if self.synapse is not None and 'synapse' not in already_processed:
            already_processed.append('synapse')
            showIndent(outfile, level)
            outfile.write('synapse = "%s",\n' % (self.synapse,))
        if self.fromxx is not None and 'fromxx' not in already_processed:
            already_processed.append('fromxx')
            showIndent(outfile, level)
            outfile.write('fromxx = "%s",\n' % (self.fromxx,))
    def exportLiteralChildren(self, outfile, level, name_):
        """SynapticConnection has no children to render."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the to/synapse/from string attributes from *node*."""
        value = find_attr_value_('to', node)
        if value is not None and 'to' not in already_processed:
            already_processed.append('to')
            self.to = value
        value = find_attr_value_('synapse', node)
        if value is not None and 'synapse' not in already_processed:
            already_processed.append('synapse')
            self.synapse = value
        value = find_attr_value_('from', node)
        if value is not None and 'from' not in already_processed:
            already_processed.append('from')
            self.fromxx = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """SynapticConnection defines no child elements."""
        pass
# end class SynapticConnection
class Connection(GeneratedsSuper):
    """Generated binding for the <connection> element, linking a 'from'
    endpoint to a 'to' endpoint.  The XML attribute 'from' is stored as
    'fromxx' because 'from' is a Python keyword."""
    subclass = None
    superclass = None
    def __init__(self, to=None, fromxx=None):
        self.to = _cast(None, to)
        self.fromxx = _cast(None, fromxx)
    def factory(*args_, **kwargs_):
        """Build a Connection (or its registered subclass, if any)."""
        if Connection.subclass:
            return Connection.subclass(*args_, **kwargs_)
        else:
            return Connection(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_to(self): return self.to
    def set_to(self, to): self.to = to
    def get_from(self): return self.fromxx
    def set_from(self, fromxx): self.fromxx = fromxx
    def export(self, outfile, level, namespace_='', name_='Connection', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Connection')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Connection'):
        """Write the to/from attributes; fromxx is emitted as 'from'."""
        if self.to is not None and 'to' not in already_processed:
            already_processed.append('to')
            outfile.write(' to=%s' % (self.gds_format_string(quote_attrib(self.to).encode(ExternalEncoding), input_name='to'), ))
        if self.fromxx is not None and 'fromxx' not in already_processed:
            already_processed.append('fromxx')
            outfile.write(' from=%s' % (self.gds_format_string(quote_attrib(self.fromxx).encode(ExternalEncoding), input_name='from'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Connection', fromsubclass_=False):
        """Connection has no child elements; nothing is written."""
        pass
    def hasContent_(self):
        """Connection never has child content.  (The generated original
        tested an always-falsy empty tuple `()`.)"""
        return False
    def exportLiteral(self, outfile, level, name_='Connection'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the to/fromxx attributes in Python-literal form."""
        if self.to is not None and 'to' not in already_processed:
            already_processed.append('to')
            showIndent(outfile, level)
            outfile.write('to = "%s",\n' % (self.to,))
        if self.fromxx is not None and 'fromxx' not in already_processed:
            already_processed.append('fromxx')
            showIndent(outfile, level)
            outfile.write('fromxx = "%s",\n' % (self.fromxx,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Connection has no children to render in literal form."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the to/from string attributes from *node*."""
        value = find_attr_value_('to', node)
        if value is not None and 'to' not in already_processed:
            already_processed.append('to')
            self.to = value
        value = find_attr_value_('from', node)
        if value is not None and 'from' not in already_processed:
            already_processed.append('from')
            self.fromxx = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Connection defines no child elements; ignore any given."""
        pass
# end class Connection
class ExplicitInput(GeneratedsSuper):
    """Generated binding for the <explicitInput> element: attaches a
    named input source to a target cell."""
    subclass = None
    superclass = None
    def __init__(self, input=None, target=None):
        self.input = _cast(None, input)
        self.target = _cast(None, target)
    def factory(*args_, **kwargs_):
        """Build an ExplicitInput (or its registered subclass, if any)."""
        if ExplicitInput.subclass:
            return ExplicitInput.subclass(*args_, **kwargs_)
        else:
            return ExplicitInput(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_input(self): return self.input
    def set_input(self, input): self.input = input
    def get_target(self): return self.target
    def set_target(self, target): self.target = target
    def export(self, outfile, level, namespace_='', name_='ExplicitInput', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ExplicitInput')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ExplicitInput'):
        """Write the input/target attributes that are set."""
        if self.input is not None and 'input' not in already_processed:
            already_processed.append('input')
            outfile.write(' input=%s' % (self.gds_format_string(quote_attrib(self.input).encode(ExternalEncoding), input_name='input'), ))
        if self.target is not None and 'target' not in already_processed:
            already_processed.append('target')
            outfile.write(' target=%s' % (self.gds_format_string(quote_attrib(self.target).encode(ExternalEncoding), input_name='target'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ExplicitInput', fromsubclass_=False):
        """ExplicitInput has no child elements; nothing is written."""
        pass
    def hasContent_(self):
        """ExplicitInput never has child content.  (The generated
        original tested an always-falsy empty tuple `()`.)"""
        return False
    def exportLiteral(self, outfile, level, name_='ExplicitInput'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the input/target attributes in Python-literal form."""
        if self.input is not None and 'input' not in already_processed:
            already_processed.append('input')
            showIndent(outfile, level)
            outfile.write('input = "%s",\n' % (self.input,))
        if self.target is not None and 'target' not in already_processed:
            already_processed.append('target')
            showIndent(outfile, level)
            outfile.write('target = "%s",\n' % (self.target,))
    def exportLiteralChildren(self, outfile, level, name_):
        """ExplicitInput has no children to render in literal form."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read the input/target string attributes from *node*."""
        value = find_attr_value_('input', node)
        if value is not None and 'input' not in already_processed:
            already_processed.append('input')
            self.input = value
        value = find_attr_value_('target', node)
        if value is not None and 'target' not in already_processed:
            already_processed.append('target')
            self.target = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """ExplicitInput defines no child elements; ignore any given."""
        pass
# end class ExplicitInput
class Base(GeneratedsSuper):
    """Anything which can have a unique id (within its parent) i.e. most
    elements.  Also carries an optional neuroLexId and xsi:type
    extension marker used by derived schema types."""
    subclass = None
    superclass = None
    def __init__(self, id=None, neuroLexId=None, extensiontype_=None):
        self.id = _cast(None, id)
        self.neuroLexId = _cast(None, neuroLexId)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Build a Base (or its registered subclass, if any)."""
        if Base.subclass:
            return Base.subclass(*args_, **kwargs_)
        else:
            return Base(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def validate_NmlId(self, value):
        """Validation hook for the NmlId type (restriction on
        xs:string); currently performs no checks."""
        pass
    def get_neuroLexId(self): return self.neuroLexId
    def set_neuroLexId(self, neuroLexId): self.neuroLexId = neuroLexId
    def validate_NeuroLexId(self, value):
        """Validation hook for the NeuroLexId type (restriction on
        xs:string); currently performs no checks."""
        pass
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='Base', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Base')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Base'):
        """Write id/neuroLexId, plus xsi:type when this instance stands
        in for a derived schema type."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id=%s' % (quote_attrib(self.id), ))
        if self.neuroLexId is not None and 'neuroLexId' not in already_processed:
            already_processed.append('neuroLexId')
            outfile.write(' neuroLexId=%s' % (quote_attrib(self.neuroLexId), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='Base', fromsubclass_=False):
        """Base itself has no child elements; subclasses add their own."""
        pass
    def hasContent_(self):
        """Base itself never has child content.  (The generated original
        tested an always-falsy empty tuple `()`.)"""
        return False
    def exportLiteral(self, outfile, level, name_='Base'):
        """Write a Python-literal rendering of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the id/neuroLexId attributes in Python-literal form."""
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = "%s",\n' % (self.id,))
        if self.neuroLexId is not None and 'neuroLexId' not in already_processed:
            already_processed.append('neuroLexId')
            showIndent(outfile, level)
            outfile.write('neuroLexId = "%s",\n' % (self.neuroLexId,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Base has no children to render in literal form."""
        pass
    def build(self, node):
        """Populate this instance from an ElementTree/lxml *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read id/neuroLexId/xsi:type from *node* and run the
        (currently no-op) type validators."""
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            self.id = value
            self.validate_NmlId(self.id)  # validate type NmlId
        value = find_attr_value_('neuroLexId', node)
        if value is not None and 'neuroLexId' not in already_processed:
            already_processed.append('neuroLexId')
            self.neuroLexId = value
            self.validate_NeuroLexId(self.neuroLexId)  # validate type NeuroLexId
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Base defines no child elements; subclasses override."""
        pass
# end class Base
class Standalone(Base):
    """Elements which can stand alone and be referenced by id, e.g. cell,
    morphology.

    Adds to Base: a ``metaid`` XML attribute plus the child elements
    ``notes`` (string) and ``annotation`` (Annotation object).
    Generated XML-binding class.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, extensiontype_=None):
        super(Standalone, self).__init__(id, neuroLexId, extensiontype_, )
        self.metaid = _cast(None, metaid)
        self.notes = notes
        self.annotation = annotation
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if Standalone.subclass:
            return Standalone.subclass(*args_, **kwargs_)
        else:
            return Standalone(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_notes(self): return self.notes
    def set_notes(self, notes): self.notes = notes
    def validate_Notes(self, value):
        # Validate type Notes, a restriction on xs:string.
        # No constraints enforced here; placeholder generated from the schema.
        pass
    def get_annotation(self): return self.annotation
    def set_annotation(self, annotation): self.annotation = annotation
    def get_metaid(self): return self.metaid
    def set_metaid(self, metaid): self.metaid = metaid
    def validate_MetaId(self, value):
        # Validate type MetaId, a restriction on xs:string.
        pass
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='Standalone', namespacedef_=''):
        # Serialize as XML: open tag and attributes, then either child
        # content plus a closing tag, or a self-closing tag.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Standalone')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Standalone'):
        super(Standalone, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Standalone')
        if self.metaid is not None and 'metaid' not in already_processed:
            already_processed.append('metaid')
            outfile.write(' metaid=%s' % (quote_attrib(self.metaid), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            # xsi:type needs the XMLSchema-instance namespace declared inline.
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='Standalone', fromsubclass_=False):
        super(Standalone, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.notes is not None:
            showIndent(outfile, level)
            outfile.write('<%snotes>%s</%snotes>\n' % (namespace_, self.gds_format_string(quote_xml(self.notes).encode(ExternalEncoding), input_name='notes'), namespace_))
        if self.annotation is not None:
            self.annotation.export(outfile, level, namespace_, name_='annotation')
    def hasContent_(self):
        # True when either child element is set or the base has content.
        if (
            self.notes is not None or
            self.annotation is not None or
            super(Standalone, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Standalone'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.metaid is not None and 'metaid' not in already_processed:
            already_processed.append('metaid')
            showIndent(outfile, level)
            outfile.write('metaid = "%s",\n' % (self.metaid,))
        super(Standalone, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Standalone, self).exportLiteralChildren(outfile, level, name_)
        if self.notes is not None:
            showIndent(outfile, level)
            outfile.write('notes=%s,\n' % quote_python(self.notes).encode(ExternalEncoding))
        if self.annotation is not None:
            showIndent(outfile, level)
            outfile.write('annotation=model_.Annotation(\n')
            self.annotation.exportLiteral(outfile, level, name_='annotation')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        # Populate from an etree element: attributes, then child elements.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('metaid', node)
        if value is not None and 'metaid' not in already_processed:
            already_processed.append('metaid')
            self.metaid = value
            self.validate_MetaId(self.metaid) # validate type MetaId
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
        super(Standalone, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'notes':
            notes_ = child_.text
            notes_ = self.gds_validate_string(notes_, node, 'notes')
            self.notes = notes_
            self.validate_Notes(self.notes) # validate type Notes
        elif nodeName_ == 'annotation':
            obj_ = Annotation.factory()
            obj_.build(child_)
            self.set_annotation(obj_)
        super(Standalone, self).buildChildren(child_, node, nodeName_, True)
# end class Standalone
class Projection(Base):
    """Generated binding for a projection element.

    The schema leaves the child content open, so children are held as a
    list of "anytype" objects built via ``gds_build_any``.  No XML
    attributes beyond those of Base.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, anytypeobjs_=None):
        super(Projection, self).__init__(id, neuroLexId, )
        # Default to a fresh list so instances never share child storage.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if Projection.subclass:
            return Projection.subclass(*args_, **kwargs_)
        else:
            return Projection(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Bug fix: this previously assigned into self._anytypeobjs_ (leading
    # underscore), an attribute that is never created, so every call raised
    # AttributeError.  It now updates the real list, matching the insert_*
    # methods of the other generated classes (e.g. Network.insert_space).
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='', name_='Projection', namespacedef_=''):
        # Serialize as XML: open tag, attributes, children (if any), close.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Projection')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Projection'):
        # No attributes beyond the inherited ones.
        super(Projection, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Projection')
    def exportChildren(self, outfile, level, namespace_='', name_='Projection', fromsubclass_=False):
        super(Projection, self).exportChildren(outfile, level, namespace_, name_, True)
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there are any children or the base has content.
        if (
            self.anytypeobjs_ or
            super(Projection, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Projection'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(Projection, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Projection, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate from an etree node: attributes first, then children.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(Projection, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Any child element is accepted and stored as an anytype object.
        obj_ = self.gds_build_any(child_, 'Projection')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
        super(Projection, self).buildChildren(child_, node, nodeName_, True)
# end class Projection
class CellSet(Base):
    """Generated binding for a cellSet element.

    XML attributes (beyond Base's): ``select`` (string).  Child content is
    open in the schema and is held as a list of "anytype" objects.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, select=None, anytypeobjs_=None):
        super(CellSet, self).__init__(id, neuroLexId, )
        self.select = _cast(None, select)
        # Default to a fresh list so instances never share child storage.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if CellSet.subclass:
            return CellSet.subclass(*args_, **kwargs_)
        else:
            return CellSet(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Bug fix: this previously assigned into self._anytypeobjs_ (leading
    # underscore), an attribute that is never created, so every call raised
    # AttributeError.  It now updates the real list, matching the insert_*
    # methods of the other generated classes.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_select(self): return self.select
    def set_select(self, select): self.select = select
    def export(self, outfile, level, namespace_='', name_='CellSet', namespacedef_=''):
        # Serialize as XML: open tag, attributes, children (if any), close.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='CellSet')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='CellSet'):
        super(CellSet, self).exportAttributes(outfile, level, already_processed, namespace_, name_='CellSet')
        if self.select is not None and 'select' not in already_processed:
            already_processed.append('select')
            outfile.write(' select=%s' % (self.gds_format_string(quote_attrib(self.select).encode(ExternalEncoding), input_name='select'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='CellSet', fromsubclass_=False):
        super(CellSet, self).exportChildren(outfile, level, namespace_, name_, True)
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there are any children or the base has content.
        if (
            self.anytypeobjs_ or
            super(CellSet, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='CellSet'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.select is not None and 'select' not in already_processed:
            already_processed.append('select')
            showIndent(outfile, level)
            outfile.write('select = "%s",\n' % (self.select,))
        super(CellSet, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(CellSet, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate from an etree node: attributes first, then children.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('select', node)
        if value is not None and 'select' not in already_processed:
            already_processed.append('select')
            self.select = value
        super(CellSet, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Any child element is accepted and stored as an anytype object.
        obj_ = self.gds_build_any(child_, 'CellSet')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
        super(CellSet, self).buildChildren(child_, node, nodeName_, True)
# end class CellSet
class Population(Standalone):
    """Generated binding for a population element.

    XML attributes (beyond Standalone's): ``cell``,
    ``extracellularProperties``, ``component`` and ``network`` (all
    validated as NmlId strings) plus ``size`` (integer).  Child elements:
    ``layout`` (Layout) and ``instances`` (Instances).
    """
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, cell=None, extracellularProperties=None, component=None, network=None, size=None, layout=None, instances=None):
        super(Population, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.cell = _cast(None, cell)
        self.extracellularProperties = _cast(None, extracellularProperties)
        self.component = _cast(None, component)
        self.network = _cast(None, network)
        # size is coerced to int when a value is supplied.
        self.size = _cast(int, size)
        self.layout = layout
        self.instances = instances
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if Population.subclass:
            return Population.subclass(*args_, **kwargs_)
        else:
            return Population(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_layout(self): return self.layout
    def set_layout(self, layout): self.layout = layout
    def get_instances(self): return self.instances
    def set_instances(self, instances): self.instances = instances
    def get_cell(self): return self.cell
    def set_cell(self, cell): self.cell = cell
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        # No constraints enforced here; placeholder generated from the schema.
        pass
    def get_extracellularProperties(self): return self.extracellularProperties
    def set_extracellularProperties(self, extracellularProperties): self.extracellularProperties = extracellularProperties
    def get_component(self): return self.component
    def set_component(self, component): self.component = component
    def get_network(self): return self.network
    def set_network(self, network): self.network = network
    def get_size(self): return self.size
    def set_size(self, size): self.size = size
    def export(self, outfile, level, namespace_='', name_='Population', namespacedef_=''):
        # Serialize as XML: open tag, attributes, children (if any), close.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Population')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Population'):
        super(Population, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Population')
        if self.cell is not None and 'cell' not in already_processed:
            already_processed.append('cell')
            outfile.write(' cell=%s' % (quote_attrib(self.cell), ))
        if self.extracellularProperties is not None and 'extracellularProperties' not in already_processed:
            already_processed.append('extracellularProperties')
            outfile.write(' extracellularProperties=%s' % (quote_attrib(self.extracellularProperties), ))
        if self.component is not None and 'component' not in already_processed:
            already_processed.append('component')
            outfile.write(' component=%s' % (quote_attrib(self.component), ))
        if self.network is not None and 'network' not in already_processed:
            already_processed.append('network')
            outfile.write(' network=%s' % (quote_attrib(self.network), ))
        if self.size is not None and 'size' not in already_processed:
            already_processed.append('size')
            outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
    def exportChildren(self, outfile, level, namespace_='', name_='Population', fromsubclass_=False):
        super(Population, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.layout is not None:
            self.layout.export(outfile, level, namespace_, name_='layout')
        if self.instances is not None:
            self.instances.export(outfile, level, namespace_, name_='instances', )
    def hasContent_(self):
        # True when either child element is set or the base has content.
        if (
            self.layout is not None or
            self.instances is not None or
            super(Population, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Population'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.cell is not None and 'cell' not in already_processed:
            already_processed.append('cell')
            showIndent(outfile, level)
            outfile.write('cell = "%s",\n' % (self.cell,))
        if self.extracellularProperties is not None and 'extracellularProperties' not in already_processed:
            already_processed.append('extracellularProperties')
            showIndent(outfile, level)
            outfile.write('extracellularProperties = "%s",\n' % (self.extracellularProperties,))
        if self.component is not None and 'component' not in already_processed:
            already_processed.append('component')
            showIndent(outfile, level)
            outfile.write('component = "%s",\n' % (self.component,))
        if self.network is not None and 'network' not in already_processed:
            already_processed.append('network')
            showIndent(outfile, level)
            outfile.write('network = "%s",\n' % (self.network,))
        if self.size is not None and 'size' not in already_processed:
            already_processed.append('size')
            showIndent(outfile, level)
            outfile.write('size = %d,\n' % (self.size,))
        super(Population, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Population, self).exportLiteralChildren(outfile, level, name_)
        if self.layout is not None:
            showIndent(outfile, level)
            outfile.write('layout=model_.Layout(\n')
            self.layout.exportLiteral(outfile, level, name_='layout')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.instances is not None:
            showIndent(outfile, level)
            outfile.write('instances=model_.Instances(\n')
            self.instances.exportLiteral(outfile, level, name_='instances')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        # Populate from an etree element: attributes, then child elements.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('cell', node)
        if value is not None and 'cell' not in already_processed:
            already_processed.append('cell')
            self.cell = value
            self.validate_NmlId(self.cell) # validate type NmlId
        value = find_attr_value_('extracellularProperties', node)
        if value is not None and 'extracellularProperties' not in already_processed:
            already_processed.append('extracellularProperties')
            self.extracellularProperties = value
            self.validate_NmlId(self.extracellularProperties) # validate type NmlId
        value = find_attr_value_('component', node)
        if value is not None and 'component' not in already_processed:
            already_processed.append('component')
            self.component = value
            self.validate_NmlId(self.component) # validate type NmlId
        value = find_attr_value_('network', node)
        if value is not None and 'network' not in already_processed:
            already_processed.append('network')
            self.network = value
            self.validate_NmlId(self.network) # validate type NmlId
        value = find_attr_value_('size', node)
        if value is not None and 'size' not in already_processed:
            already_processed.append('size')
            # NOTE: Python 2 except syntax, consistent with the rest of
            # this generated module.
            try:
                self.size = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        super(Population, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'layout':
            obj_ = Layout.factory()
            obj_.build(child_)
            self.set_layout(obj_)
        elif nodeName_ == 'instances':
            obj_ = Instances.factory()
            obj_.build(child_)
            self.set_instances(obj_)
        super(Population, self).buildChildren(child_, node, nodeName_, True)
# end class Population
class Region(Base):
    """Generated binding for a region element.

    XML attributes (beyond Base's): ``space`` (validated as NmlId).  Child
    content is open in the schema and is held as a list of "anytype"
    objects.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, space=None, anytypeobjs_=None):
        super(Region, self).__init__(id, neuroLexId, )
        self.space = _cast(None, space)
        # Default to a fresh list so instances never share child storage.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if Region.subclass:
            return Region.subclass(*args_, **kwargs_)
        else:
            return Region(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Bug fix: this previously assigned into self._anytypeobjs_ (leading
    # underscore), an attribute that is never created, so every call raised
    # AttributeError.  It now updates the real list, matching the insert_*
    # methods of the other generated classes.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_space(self): return self.space
    def set_space(self, space): self.space = space
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        # No constraints enforced here; placeholder generated from the schema.
        pass
    def export(self, outfile, level, namespace_='', name_='Region', namespacedef_=''):
        # Serialize as XML: open tag, attributes, children (if any), close.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Region')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Region'):
        super(Region, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Region')
        if self.space is not None and 'space' not in already_processed:
            already_processed.append('space')
            outfile.write(' space=%s' % (quote_attrib(self.space), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Region', fromsubclass_=False):
        super(Region, self).exportChildren(outfile, level, namespace_, name_, True)
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there are any children or the base has content.
        if (
            self.anytypeobjs_ or
            super(Region, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Region'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.space is not None and 'space' not in already_processed:
            already_processed.append('space')
            showIndent(outfile, level)
            outfile.write('space = "%s",\n' % (self.space,))
        super(Region, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Region, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate from an etree node: attributes first, then children.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('space', node)
        if value is not None and 'space' not in already_processed:
            already_processed.append('space')
            self.space = value
            self.validate_NmlId(self.space) # validate type NmlId
        super(Region, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Any child element is accepted and stored as an anytype object.
        obj_ = self.gds_build_any(child_, 'Region')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
        super(Region, self).buildChildren(child_, node, nodeName_, True)
# end class Region
class Space(Base):
    """Generated binding for a space element.

    XML attributes (beyond Base's): ``basedOn`` (validated as
    allowedSpaces).  Child element: ``structure`` (SpaceStructure).
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, basedOn=None, structure=None):
        super(Space, self).__init__(id, neuroLexId, )
        self.basedOn = _cast(None, basedOn)
        self.structure = structure
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if Space.subclass:
            return Space.subclass(*args_, **kwargs_)
        else:
            return Space(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_structure(self): return self.structure
    def set_structure(self, structure): self.structure = structure
    def get_basedOn(self): return self.basedOn
    def set_basedOn(self, basedOn): self.basedOn = basedOn
    def validate_allowedSpaces(self, value):
        # Validate type allowedSpaces, a restriction on xs:string.
        # No constraints enforced here; placeholder generated from the schema.
        pass
    def export(self, outfile, level, namespace_='', name_='Space', namespacedef_=''):
        # Serialize as XML: open tag, attributes, children (if any), close.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Space')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Space'):
        super(Space, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Space')
        if self.basedOn is not None and 'basedOn' not in already_processed:
            already_processed.append('basedOn')
            outfile.write(' basedOn=%s' % (quote_attrib(self.basedOn), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Space', fromsubclass_=False):
        super(Space, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.structure is not None:
            self.structure.export(outfile, level, namespace_, name_='structure')
    def hasContent_(self):
        # True when the structure child is set or the base has content.
        if (
            self.structure is not None or
            super(Space, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Space'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.basedOn is not None and 'basedOn' not in already_processed:
            already_processed.append('basedOn')
            showIndent(outfile, level)
            outfile.write('basedOn = "%s",\n' % (self.basedOn,))
        super(Space, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Space, self).exportLiteralChildren(outfile, level, name_)
        if self.structure is not None:
            showIndent(outfile, level)
            outfile.write('structure=model_.SpaceStructure(\n')
            self.structure.exportLiteral(outfile, level, name_='structure')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        # Populate from an etree element: attributes, then child elements.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('basedOn', node)
        if value is not None and 'basedOn' not in already_processed:
            already_processed.append('basedOn')
            self.basedOn = value
            self.validate_allowedSpaces(self.basedOn) # validate type allowedSpaces
        super(Space, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'structure':
            obj_ = SpaceStructure.factory()
            obj_.build(child_)
            self.set_structure(obj_)
        super(Space, self).buildChildren(child_, node, nodeName_, True)
# end class Space
class Network(Standalone):
subclass = None
superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, space=None, region=None, population=None, cellSet=None, projection=None, synapticConnection=None, connection=None, explicitInput=None):
        # Each child-element collection defaults to a fresh list so that
        # instances never share mutable state through default arguments.
        super(Network, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        if space is None:
            self.space = []
        else:
            self.space = space
        if region is None:
            self.region = []
        else:
            self.region = region
        if population is None:
            self.population = []
        else:
            self.population = population
        if cellSet is None:
            self.cellSet = []
        else:
            self.cellSet = cellSet
        if projection is None:
            self.projection = []
        else:
            self.projection = projection
        if synapticConnection is None:
            self.synapticConnection = []
        else:
            self.synapticConnection = synapticConnection
        if connection is None:
            self.connection = []
        else:
            self.connection = connection
        if explicitInput is None:
            self.explicitInput = []
        else:
            self.explicitInput = explicitInput
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if Network.subclass:
            return Network.subclass(*args_, **kwargs_)
        else:
            return Network(*args_, **kwargs_)
    factory = staticmethod(factory)
def get_space(self): return self.space
def set_space(self, space): self.space = space
def add_space(self, value): self.space.append(value)
def insert_space(self, index, value): self.space[index] = value
def get_region(self): return self.region
def set_region(self, region): self.region = region
def add_region(self, value): self.region.append(value)
def insert_region(self, index, value): self.region[index] = value
def get_population(self): return self.population
def set_population(self, population): self.population = population
def add_population(self, value): self.population.append(value)
def insert_population(self, index, value): self.population[index] = value
def get_cellSet(self): return self.cellSet
def set_cellSet(self, cellSet): self.cellSet = cellSet
def add_cellSet(self, value): self.cellSet.append(value)
def insert_cellSet(self, index, value): self.cellSet[index] = value
def get_projection(self): return self.projection
def set_projection(self, projection): self.projection = projection
def add_projection(self, value): self.projection.append(value)
def insert_projection(self, index, value): self.projection[index] = value
def get_synapticConnection(self): return self.synapticConnection
def set_synapticConnection(self, synapticConnection): self.synapticConnection = synapticConnection
def add_synapticConnection(self, value): self.synapticConnection.append(value)
def insert_synapticConnection(self, index, value): self.synapticConnection[index] = value
def get_connection(self): return self.connection
def set_connection(self, connection): self.connection = connection
def add_connection(self, value): self.connection.append(value)
def insert_connection(self, index, value): self.connection[index] = value
def get_explicitInput(self): return self.explicitInput
def set_explicitInput(self, explicitInput): self.explicitInput = explicitInput
def add_explicitInput(self, value): self.explicitInput.append(value)
def insert_explicitInput(self, index, value): self.explicitInput[index] = value
    def export(self, outfile, level, namespace_='', name_='Network', namespacedef_=''):
        # Serialize this network as XML: open tag and attributes, then
        # either children plus a closing tag or a self-closing tag.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Network')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Network'):
        # Network adds no XML attributes of its own; delegate to Standalone.
        super(Network, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Network')
    def exportChildren(self, outfile, level, namespace_='', name_='Network', fromsubclass_=False):
        # Emit child elements in schema order: base children first, then
        # each list in its declared sequence.
        super(Network, self).exportChildren(outfile, level, namespace_, name_, True)
        for space_ in self.space:
            space_.export(outfile, level, namespace_, name_='space')
        for region_ in self.region:
            region_.export(outfile, level, namespace_, name_='region')
        for population_ in self.population:
            population_.export(outfile, level, namespace_, name_='population')
        for cellSet_ in self.cellSet:
            cellSet_.export(outfile, level, namespace_, name_='cellSet')
        for projection_ in self.projection:
            projection_.export(outfile, level, namespace_, name_='projection')
        for synapticConnection_ in self.synapticConnection:
            synapticConnection_.export(outfile, level, namespace_, name_='synapticConnection')
        for connection_ in self.connection:
            connection_.export(outfile, level, namespace_, name_='connection')
        for explicitInput_ in self.explicitInput:
            explicitInput_.export(outfile, level, namespace_, name_='explicitInput')
def hasContent_(self):
if (
self.space or
self.region or
self.population or
self.cellSet or
self.projection or
self.synapticConnection or
self.connection or
self.explicitInput or
super(Network, self).hasContent_()
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='Network'):
        # Emit this object as Python-literal (constructor-style) text.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # Network adds no attributes of its own; delegate to Standalone.
        super(Network, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(Network, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('space=[\n')
level += 1
for space_ in self.space:
showIndent(outfile, level)
outfile.write('model_.Space(\n')
space_.exportLiteral(outfile, level, name_='Space')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('region=[\n')
level += 1
for region_ in self.region:
showIndent(outfile, level)
outfile.write('model_.Region(\n')
region_.exportLiteral(outfile, level, name_='Region')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('population=[\n')
level += 1
for population_ in self.population:
showIndent(outfile, level)
outfile.write('model_.Population(\n')
population_.exportLiteral(outfile, level, name_='Population')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('cellSet=[\n')
level += 1
for cellSet_ in self.cellSet:
showIndent(outfile, level)
outfile.write('model_.CellSet(\n')
cellSet_.exportLiteral(outfile, level, name_='CellSet')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('projection=[\n')
level += 1
for projection_ in self.projection:
showIndent(outfile, level)
outfile.write('model_.Projection(\n')
projection_.exportLiteral(outfile, level, name_='Projection')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('synapticConnection=[\n')
level += 1
for synapticConnection_ in self.synapticConnection:
showIndent(outfile, level)
outfile.write('model_.SynapticConnection(\n')
synapticConnection_.exportLiteral(outfile, level, name_='SynapticConnection')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('connection=[\n')
level += 1
for connection_ in self.connection:
showIndent(outfile, level)
outfile.write('model_.Connection(\n')
connection_.exportLiteral(outfile, level, name_='Connection')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('explicitInput=[\n')
level += 1
for explicitInput_ in self.explicitInput:
showIndent(outfile, level)
outfile.write('model_.ExplicitInput(\n')
explicitInput_.exportLiteral(outfile, level, name_='ExplicitInput')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
super(Network, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'space':
obj_ = Space.factory()
obj_.build(child_)
self.space.append(obj_)
elif nodeName_ == 'region':
obj_ = Region.factory()
obj_.build(child_)
self.region.append(obj_)
elif nodeName_ == 'population':
obj_ = Population.factory()
obj_.build(child_)
self.population.append(obj_)
elif nodeName_ == 'cellSet':
obj_ = CellSet.factory()
obj_.build(child_)
self.cellSet.append(obj_)
elif nodeName_ == 'projection':
obj_ = Projection.factory()
obj_.build(child_)
self.projection.append(obj_)
elif nodeName_ == 'synapticConnection':
obj_ = SynapticConnection.factory()
obj_.build(child_)
self.synapticConnection.append(obj_)
elif nodeName_ == 'connection':
obj_ = Connection.factory()
obj_.build(child_)
self.connection.append(obj_)
elif nodeName_ == 'explicitInput':
obj_ = ExplicitInput.factory()
obj_.build(child_)
self.explicitInput.append(obj_)
super(Network, self).buildChildren(child_, node, nodeName_, True)
# end class Network
class PulseGenerator(Standalone):
    """Current source that delivers a rectangular pulse.

    The pulse starts after ``delay``, lasts ``duration`` and has the given
    ``amplitude``; all three are stored as NeuroML quantity strings.
    """
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, delay=None, duration=None, amplitude=None):
        super(PulseGenerator, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.delay = _cast(None, delay)
        self.duration = _cast(None, duration)
        self.amplitude = _cast(None, amplitude)
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass when one has been installed.
        target = PulseGenerator.subclass or PulseGenerator
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_delay(self):
        return self.delay
    def set_delay(self, delay):
        self.delay = delay
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_duration(self):
        return self.duration
    def set_duration(self, duration):
        self.duration = duration
    def get_amplitude(self):
        return self.amplitude
    def set_amplitude(self, amplitude):
        self.amplitude = amplitude
    def validate_Nml2Quantity_current(self, value):
        # Validate type Nml2Quantity_current, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='PulseGenerator', namespacedef_=''):
        """Serialize this element as XML, self-closing when empty."""
        showIndent(outfile, level)
        head = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, head))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='PulseGenerator')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='PulseGenerator'):
        super(PulseGenerator, self).exportAttributes(outfile, level, already_processed, namespace_, name_='PulseGenerator')
        # All three attributes are quantity strings; emit them in schema order.
        for attr_name in ('delay', 'duration', 'amplitude'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='', name_='PulseGenerator', fromsubclass_=False):
        # No children beyond those contributed by Standalone.
        super(PulseGenerator, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        # Content is determined entirely by the superclass.
        return bool(super(PulseGenerator, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='PulseGenerator'):
        """Write this object as literal Python constructor-style source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if not self.hasContent_():
            return
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for attr_name in ('delay', 'duration', 'amplitude'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                showIndent(outfile, level)
                outfile.write('%s = "%s",\n' % (attr_name, attr_value))
        super(PulseGenerator, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(PulseGenerator, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree/lxml element node."""
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        # (attribute, validator) pairs in schema order.
        for attr_name, validator in (
                ('delay', self.validate_Nml2Quantity_time),
                ('duration', self.validate_Nml2Quantity_time),
                ('amplitude', self.validate_Nml2Quantity_current)):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                setattr(self, attr_name, value)
                validator(value)
        super(PulseGenerator, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(PulseGenerator, self).buildChildren(child_, node, nodeName_, True)
# end class PulseGenerator
class ReactionScheme(Base):
    """Reference to a reaction scheme defined in an external document.

    ``source`` locates the external definition and ``type_`` names its
    format; arbitrary (wildcard) child elements are collected in
    ``anytypeobjs_``.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, source=None, type_=None, anytypeobjs_=None):
        super(ReactionScheme, self).__init__(id, neuroLexId, )
        self.source = _cast(None, source)
        self.type_ = _cast(None, type_)
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        if ReactionScheme.subclass:
            return ReactionScheme.subclass(*args_, **kwargs_)
        else:
            return ReactionScheme(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    def insert_anytypeobjs_(self, index, value):
        # BUG FIX: previously wrote to the non-existent attribute
        # 'self._anytypeobjs_' (leading underscore), which raised
        # AttributeError on every call; assign into the real list instead,
        # matching the insert_* convention of the sibling classes.
        self.anytypeobjs_[index] = value
    def get_source(self): return self.source
    def set_source(self, source): self.source = source
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def export(self, outfile, level, namespace_='', name_='ReactionScheme', namespacedef_=''):
        """Serialize this element as XML, self-closing when empty."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ReactionScheme')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ReactionScheme'):
        super(ReactionScheme, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ReactionScheme')
        if self.source is not None and 'source' not in already_processed:
            already_processed.append('source')
            outfile.write(' source=%s' % (self.gds_format_string(quote_attrib(self.source).encode(ExternalEncoding), input_name='source'), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ReactionScheme', fromsubclass_=False):
        super(ReactionScheme, self).exportChildren(outfile, level, namespace_, name_, True)
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # Content when any wildcard child exists or the superclass has content.
        if (
            self.anytypeobjs_ or
            super(ReactionScheme, self).hasContent_()
        ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ReactionScheme'):
        """Write this object as literal Python constructor-style source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.source is not None and 'source' not in already_processed:
            already_processed.append('source')
            showIndent(outfile, level)
            outfile.write('source = "%s",\n' % (self.source,))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            showIndent(outfile, level)
            outfile.write('type_ = "%s",\n' % (self.type_,))
        super(ReactionScheme, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ReactionScheme, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree/lxml element node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('source', node)
        if value is not None and 'source' not in already_processed:
            already_processed.append('source')
            self.source = value
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.append('type')
            self.type_ = value
        super(ReactionScheme, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Wildcard content: accept any child element type.
        obj_ = self.gds_build_any(child_, 'ReactionScheme')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
        super(ReactionScheme, self).buildChildren(child_, node, nodeName_, True)
# end class ReactionScheme
class ExtracellularProperties(Base):
    """Properties of the medium surrounding cells: a temperature plus a
    list of ion ``species``."""
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, temperature=None, species=None):
        super(ExtracellularProperties, self).__init__(id, neuroLexId, )
        self.temperature = _cast(None, temperature)
        self.species = [] if species is None else species
    def factory(*args_, **kwargs_):
        # Honour subclass registration when building instances.
        target = ExtracellularProperties.subclass or ExtracellularProperties
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_species(self):
        return self.species
    def set_species(self, species):
        self.species = species
    def add_species(self, value):
        self.species.append(value)
    def insert_species(self, index, value):
        self.species[index] = value
    def get_temperature(self):
        return self.temperature
    def set_temperature(self, temperature):
        self.temperature = temperature
    def validate_Nml2Quantity_temperature(self, value):
        # Validate type Nml2Quantity_temperature, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='ExtracellularProperties', namespacedef_=''):
        """Serialize this element as XML, self-closing when empty."""
        showIndent(outfile, level)
        head = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, head))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ExtracellularProperties')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ExtracellularProperties'):
        super(ExtracellularProperties, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ExtracellularProperties')
        if self.temperature is not None and 'temperature' not in already_processed:
            already_processed.append('temperature')
            outfile.write(' temperature=%s' % (quote_attrib(self.temperature), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ExtracellularProperties', fromsubclass_=False):
        super(ExtracellularProperties, self).exportChildren(outfile, level, namespace_, name_, True)
        for member in self.species:
            member.export(outfile, level, namespace_, name_='species')
    def hasContent_(self):
        # Content when any species exists or the superclass has content.
        return bool(self.species or super(ExtracellularProperties, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='ExtracellularProperties'):
        """Write this object as literal Python constructor-style source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if not self.hasContent_():
            return
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.temperature is not None and 'temperature' not in already_processed:
            already_processed.append('temperature')
            showIndent(outfile, level)
            outfile.write('temperature = "%s",\n' % (self.temperature,))
        super(ExtracellularProperties, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ExtracellularProperties, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('species=[\n')
        inner = level + 1
        for member in self.species:
            showIndent(outfile, inner)
            outfile.write('model_.Species(\n')
            member.exportLiteral(outfile, inner, name_='Species')
            showIndent(outfile, inner)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree/lxml element node."""
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('temperature', node)
        if value is not None and 'temperature' not in already_processed:
            already_processed.append('temperature')
            self.temperature = value
            self.validate_Nml2Quantity_temperature(self.temperature)  # validate type Nml2Quantity_temperature
        super(ExtracellularProperties, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'species':
            member = Species.factory()
            member.build(child_)
            self.species.append(member)
        super(ExtracellularProperties, self).buildChildren(child_, node, nodeName_, True)
# end class ExtracellularProperties
class ChannelDensity(Base):
    """Conductance density of an ion channel over a segment group or segment.

    Specifying the ion here again is redundant (it is also set in
    ionChannel). It is kept TEMPORARILY because selecting all ca- or
    na-conducting channel populations/densities in a cell would otherwise
    be difficult. It should be removed in the longer term, due to possible
    inconsistencies between this value and the one in the ionChannel
    element. TODO: remove.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, segmentGroup='all', ion=None, ionChannel=None, erev=None, condDensity=None, segment=None, variableParameter=None):
        super(ChannelDensity, self).__init__(id, neuroLexId, )
        self.segmentGroup = _cast(None, segmentGroup)
        self.ion = _cast(None, ion)
        self.ionChannel = _cast(None, ionChannel)
        self.erev = _cast(None, erev)
        self.condDensity = _cast(None, condDensity)
        self.segment = _cast(None, segment)
        self.variableParameter = [] if variableParameter is None else variableParameter
    def factory(*args_, **kwargs_):
        # Honour subclass registration when building instances.
        target = ChannelDensity.subclass or ChannelDensity
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_variableParameter(self):
        return self.variableParameter
    def set_variableParameter(self, variableParameter):
        self.variableParameter = variableParameter
    def add_variableParameter(self, value):
        self.variableParameter.append(value)
    def insert_variableParameter(self, index, value):
        self.variableParameter[index] = value
    def get_segmentGroup(self):
        return self.segmentGroup
    def set_segmentGroup(self, segmentGroup):
        self.segmentGroup = segmentGroup
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def get_ion(self):
        return self.ion
    def set_ion(self, ion):
        self.ion = ion
    def get_ionChannel(self):
        return self.ionChannel
    def set_ionChannel(self, ionChannel):
        self.ionChannel = ionChannel
    def get_erev(self):
        return self.erev
    def set_erev(self, erev):
        self.erev = erev
    def validate_Nml2Quantity_voltage(self, value):
        # Validate type Nml2Quantity_voltage, a restriction on xs:string.
        pass
    def get_condDensity(self):
        return self.condDensity
    def set_condDensity(self, condDensity):
        self.condDensity = condDensity
    def validate_Nml2Quantity_conductanceDensity(self, value):
        # Validate type Nml2Quantity_conductanceDensity, a restriction on xs:string.
        pass
    def get_segment(self):
        return self.segment
    def set_segment(self, segment):
        self.segment = segment
    def export(self, outfile, level, namespace_='', name_='ChannelDensity', namespacedef_=''):
        """Serialize this element as XML, self-closing when empty."""
        showIndent(outfile, level)
        head = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, head))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ChannelDensity')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ChannelDensity'):
        super(ChannelDensity, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ChannelDensity')
        # All attributes are plain strings; emit them in schema order.
        for attr_name in ('segmentGroup', 'ion', 'ionChannel', 'erev', 'condDensity', 'segment'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                outfile.write(' %s=%s' % (attr_name, quote_attrib(attr_value)))
    def exportChildren(self, outfile, level, namespace_='', name_='ChannelDensity', fromsubclass_=False):
        super(ChannelDensity, self).exportChildren(outfile, level, namespace_, name_, True)
        for member in self.variableParameter:
            member.export(outfile, level, namespace_, name_='variableParameter')
    def hasContent_(self):
        # Content when any variableParameter exists or the superclass has content.
        return bool(self.variableParameter or super(ChannelDensity, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='ChannelDensity'):
        """Write this object as literal Python constructor-style source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if not self.hasContent_():
            return
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for attr_name in ('segmentGroup', 'ion', 'ionChannel', 'erev', 'condDensity', 'segment'):
            attr_value = getattr(self, attr_name)
            if attr_value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                showIndent(outfile, level)
                outfile.write('%s = "%s",\n' % (attr_name, attr_value))
        super(ChannelDensity, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ChannelDensity, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('variableParameter=[\n')
        inner = level + 1
        for member in self.variableParameter:
            showIndent(outfile, inner)
            outfile.write('model_.VariableParameter(\n')
            member.exportLiteral(outfile, inner, name_='VariableParameter')
            showIndent(outfile, inner)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree/lxml element node."""
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        # (attribute, validator) pairs in schema order.
        for attr_name, validator in (
                ('segmentGroup', self.validate_NmlId),
                ('ion', self.validate_NmlId),
                ('ionChannel', self.validate_NmlId),
                ('erev', self.validate_Nml2Quantity_voltage),
                ('condDensity', self.validate_Nml2Quantity_conductanceDensity),
                ('segment', self.validate_NmlId)):
            value = find_attr_value_(attr_name, node)
            if value is not None and attr_name not in already_processed:
                already_processed.append(attr_name)
                setattr(self, attr_name, value)
                validator(value)
        super(ChannelDensity, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'variableParameter':
            member = VariableParameter.factory()
            member.build(child_)
            self.variableParameter.append(member)
        super(ChannelDensity, self).buildChildren(child_, node, nodeName_, True)
# end class ChannelDensity
class ChannelPopulation(Base):
subclass = None
superclass = Base
def __init__(self, id=None, neuroLexId=None, ionChannel=None, segmentGroup='all', erev=None, number=None, segment=None, variableParameter=None):
super(ChannelPopulation, self).__init__(id, neuroLexId, )
self.ionChannel = _cast(None, ionChannel)
self.segmentGroup = _cast(None, segmentGroup)
self.erev = _cast(None, erev)
self.number = _cast(int, number)
self.segment = _cast(None, segment)
if variableParameter is None:
self.variableParameter = []
else:
self.variableParameter = variableParameter
def factory(*args_, **kwargs_):
if ChannelPopulation.subclass:
return ChannelPopulation.subclass(*args_, **kwargs_)
else:
return ChannelPopulation(*args_, **kwargs_)
factory = staticmethod(factory)
def get_variableParameter(self): return self.variableParameter
def set_variableParameter(self, variableParameter): self.variableParameter = variableParameter
def add_variableParameter(self, value): self.variableParameter.append(value)
def insert_variableParameter(self, index, value): self.variableParameter[index] = value
def get_ionChannel(self): return self.ionChannel
def set_ionChannel(self, ionChannel): self.ionChannel = ionChannel
def validate_NmlId(self, value):
# Validate type NmlId, a restriction on xs:string.
pass
def get_segmentGroup(self): return self.segmentGroup
def set_segmentGroup(self, segmentGroup): self.segmentGroup = segmentGroup
def get_erev(self): return self.erev
def set_erev(self, erev): self.erev = erev
def validate_Nml2Quantity_voltage(self, value):
# Validate type Nml2Quantity_voltage, a restriction on xs:string.
pass
def get_number(self): return self.number
def set_number(self, number): self.number = number
def get_segment(self): return self.segment
def set_segment(self, segment): self.segment = segment
def export(self, outfile, level, namespace_='', name_='ChannelPopulation', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ChannelPopulation')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ChannelPopulation'):
super(ChannelPopulation, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ChannelPopulation')
if self.ionChannel is not None and 'ionChannel' not in already_processed:
already_processed.append('ionChannel')
outfile.write(' ionChannel=%s' % (quote_attrib(self.ionChannel), ))
if self.segmentGroup is not None and 'segmentGroup' not in already_processed:
already_processed.append('segmentGroup')
outfile.write(' segmentGroup=%s' % (quote_attrib(self.segmentGroup), ))
if self.erev is not None and 'erev' not in already_processed:
already_processed.append('erev')
outfile.write(' erev=%s' % (quote_attrib(self.erev), ))
if self.number is not None and 'number' not in already_processed:
already_processed.append('number')
outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number'))
if self.segment is not None and 'segment' not in already_processed:
already_processed.append('segment')
outfile.write(' segment=%s' % (quote_attrib(self.segment), ))
def exportChildren(self, outfile, level, namespace_='', name_='ChannelPopulation', fromsubclass_=False):
super(ChannelPopulation, self).exportChildren(outfile, level, namespace_, name_, True)
for variableParameter_ in self.variableParameter:
variableParameter_.export(outfile, level, namespace_, name_='variableParameter')
def hasContent_(self):
if (
self.variableParameter or
super(ChannelPopulation, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='ChannelPopulation'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.ionChannel is not None and 'ionChannel' not in already_processed:
already_processed.append('ionChannel')
showIndent(outfile, level)
outfile.write('ionChannel = "%s",\n' % (self.ionChannel,))
if self.segmentGroup is not None and 'segmentGroup' not in already_processed:
already_processed.append('segmentGroup')
showIndent(outfile, level)
outfile.write('segmentGroup = "%s",\n' % (self.segmentGroup,))
if self.erev is not None and 'erev' not in already_processed:
already_processed.append('erev')
showIndent(outfile, level)
outfile.write('erev = "%s",\n' % (self.erev,))
if self.number is not None and 'number' not in already_processed:
already_processed.append('number')
showIndent(outfile, level)
outfile.write('number = %d,\n' % (self.number,))
if self.segment is not None and 'segment' not in already_processed:
already_processed.append('segment')
showIndent(outfile, level)
outfile.write('segment = "%s",\n' % (self.segment,))
super(ChannelPopulation, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(ChannelPopulation, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('variableParameter=[\n')
level += 1
for variableParameter_ in self.variableParameter:
showIndent(outfile, level)
outfile.write('model_.VariableParameter(\n')
variableParameter_.exportLiteral(outfile, level, name_='VariableParameter')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('ionChannel', node)
if value is not None and 'ionChannel' not in already_processed:
already_processed.append('ionChannel')
self.ionChannel = value
self.validate_NmlId(self.ionChannel) # validate type NmlId
value = find_attr_value_('segmentGroup', node)
if value is not None and 'segmentGroup' not in already_processed:
already_processed.append('segmentGroup')
self.segmentGroup = value
self.validate_NmlId(self.segmentGroup) # validate type NmlId
value = find_attr_value_('erev', node)
if value is not None and 'erev' not in already_processed:
already_processed.append('erev')
self.erev = value
self.validate_Nml2Quantity_voltage(self.erev) # validate type Nml2Quantity_voltage
value = find_attr_value_('number', node)
if value is not None and 'number' not in already_processed:
already_processed.append('number')
try:
self.number = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
if self.number < 0:
raise_parse_error(node, 'Invalid NonNegativeInteger')
value = find_attr_value_('segment', node)
if value is not None and 'segment' not in already_processed:
already_processed.append('segment')
self.segment = value
self.validate_NmlId(self.segment) # validate type NmlId
super(ChannelPopulation, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'variableParameter':
obj_ = VariableParameter.factory()
obj_.build(child_)
self.variableParameter.append(obj_)
super(ChannelPopulation, self).buildChildren(child_, node, nodeName_, True)
# end class ChannelPopulation
class BiophysicalProperties(Standalone):
    """Standalone element which is usually inside a single cell, but could
    be outside and referenced by id."""
    # Generated XML-binding class: holds up to three optional child elements
    # (membraneProperties, intracellularProperties, extracellularProperties).
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, membraneProperties=None, intracellularProperties=None, extracellularProperties=None):
        super(BiophysicalProperties, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.membraneProperties = membraneProperties
        self.intracellularProperties = intracellularProperties
        self.extracellularProperties = extracellularProperties
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if BiophysicalProperties.subclass:
            return BiophysicalProperties.subclass(*args_, **kwargs_)
        else:
            return BiophysicalProperties(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the three optional child elements.
    def get_membraneProperties(self): return self.membraneProperties
    def set_membraneProperties(self, membraneProperties): self.membraneProperties = membraneProperties
    def get_intracellularProperties(self): return self.intracellularProperties
    def set_intracellularProperties(self, intracellularProperties): self.intracellularProperties = intracellularProperties
    def get_extracellularProperties(self): return self.extracellularProperties
    def set_extracellularProperties(self, extracellularProperties): self.extracellularProperties = extracellularProperties
    def export(self, outfile, level, namespace_='', name_='BiophysicalProperties', namespacedef_=''):
        """Write this element (tag, attributes, children) as XML to outfile."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='BiophysicalProperties')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='BiophysicalProperties'):
        """Write XML attributes; this class adds none beyond the superclass's."""
        super(BiophysicalProperties, self).exportAttributes(outfile, level, already_processed, namespace_, name_='BiophysicalProperties')
    def exportChildren(self, outfile, level, namespace_='', name_='BiophysicalProperties', fromsubclass_=False):
        """Write each non-None child element as XML."""
        super(BiophysicalProperties, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.membraneProperties is not None:
            self.membraneProperties.export(outfile, level, namespace_, name_='membraneProperties', )
        if self.intracellularProperties is not None:
            self.intracellularProperties.export(outfile, level, namespace_, name_='intracellularProperties')
        if self.extracellularProperties is not None:
            self.extracellularProperties.export(outfile, level, namespace_, name_='extracellularProperties')
    def hasContent_(self):
        """Return True when any child element (or superclass content) is present."""
        if (
            self.membraneProperties is not None or
            self.intracellularProperties is not None or
            self.extracellularProperties is not None or
            super(BiophysicalProperties, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='BiophysicalProperties'):
        """Write a Python-literal representation of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write attributes as literals; this class adds none of its own."""
        super(BiophysicalProperties, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write each non-None child as a model_.<Class>( ... ) literal."""
        super(BiophysicalProperties, self).exportLiteralChildren(outfile, level, name_)
        if self.membraneProperties is not None:
            showIndent(outfile, level)
            outfile.write('membraneProperties=model_.MembraneProperties(\n')
            self.membraneProperties.exportLiteral(outfile, level, name_='membraneProperties')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.intracellularProperties is not None:
            showIndent(outfile, level)
            outfile.write('intracellularProperties=model_.IntracellularProperties(\n')
            self.intracellularProperties.exportLiteral(outfile, level, name_='intracellularProperties')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.extracellularProperties is not None:
            showIndent(outfile, level)
            outfile.write('extracellularProperties=model_.ExtracellularProperties(\n')
            self.extracellularProperties.exportLiteral(outfile, level, name_='extracellularProperties')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an XML element node and its children."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; this class defines none beyond the superclass's."""
        super(BiophysicalProperties, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on child tag name and attach the parsed child object."""
        if nodeName_ == 'membraneProperties':
            obj_ = MembraneProperties.factory()
            obj_.build(child_)
            self.set_membraneProperties(obj_)
        elif nodeName_ == 'intracellularProperties':
            obj_ = IntracellularProperties.factory()
            obj_.build(child_)
            self.set_intracellularProperties(obj_)
        elif nodeName_ == 'extracellularProperties':
            obj_ = ExtracellularProperties.factory()
            obj_.build(child_)
            self.set_extracellularProperties(obj_)
        super(BiophysicalProperties, self).buildChildren(child_, node, nodeName_, True)
# end class BiophysicalProperties
class InhomogeneousParam(Base):
    """A parameter that varies along a segment group, defined by a variable,
    a metric, and optional proximal/distal detail children.

    Generated XML-binding class.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, variable=None, metric=None, proximal=None, distal=None):
        super(InhomogeneousParam, self).__init__(id, neuroLexId, )
        # 'variable' and 'metric' are XML attributes; proximal/distal are children.
        self.variable = _cast(None, variable)
        self.metric = _cast(None, metric)
        self.proximal = proximal
        self.distal = distal
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if InhomogeneousParam.subclass:
            return InhomogeneousParam.subclass(*args_, **kwargs_)
        else:
            return InhomogeneousParam(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_proximal(self): return self.proximal
    def set_proximal(self, proximal): self.proximal = proximal
    def get_distal(self): return self.distal
    def set_distal(self, distal): self.distal = distal
    def get_variable(self): return self.variable
    def set_variable(self, variable): self.variable = variable
    def get_metric(self): return self.metric
    def set_metric(self, metric): self.metric = metric
    def validate_Metric(self, value):
        # Validate type Metric, a restriction on xs:string.
        # NOTE(review): generated stub — no actual validation is performed.
        pass
    def export(self, outfile, level, namespace_='', name_='InhomogeneousParam', namespacedef_=''):
        """Write this element (tag, attributes, children) as XML to outfile."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='InhomogeneousParam')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='InhomogeneousParam'):
        """Write the 'variable' and 'metric' XML attributes when set."""
        super(InhomogeneousParam, self).exportAttributes(outfile, level, already_processed, namespace_, name_='InhomogeneousParam')
        if self.variable is not None and 'variable' not in already_processed:
            already_processed.append('variable')
            outfile.write(' variable=%s' % (self.gds_format_string(quote_attrib(self.variable).encode(ExternalEncoding), input_name='variable'), ))
        if self.metric is not None and 'metric' not in already_processed:
            already_processed.append('metric')
            outfile.write(' metric=%s' % (quote_attrib(self.metric), ))
    def exportChildren(self, outfile, level, namespace_='', name_='InhomogeneousParam', fromsubclass_=False):
        """Write the proximal/distal child elements when present."""
        super(InhomogeneousParam, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.proximal is not None:
            self.proximal.export(outfile, level, namespace_, name_='proximal')
        if self.distal is not None:
            self.distal.export(outfile, level, namespace_, name_='distal')
    def hasContent_(self):
        """Return True when any child element (or superclass content) is present."""
        if (
            self.proximal is not None or
            self.distal is not None or
            super(InhomogeneousParam, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='InhomogeneousParam'):
        """Write a Python-literal representation of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the 'variable' and 'metric' attributes as keyword literals."""
        if self.variable is not None and 'variable' not in already_processed:
            already_processed.append('variable')
            showIndent(outfile, level)
            outfile.write('variable = "%s",\n' % (self.variable,))
        if self.metric is not None and 'metric' not in already_processed:
            already_processed.append('metric')
            showIndent(outfile, level)
            outfile.write('metric = "%s",\n' % (self.metric,))
        super(InhomogeneousParam, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the proximal/distal children as model_.<Class>( ... ) literals."""
        super(InhomogeneousParam, self).exportLiteralChildren(outfile, level, name_)
        if self.proximal is not None:
            showIndent(outfile, level)
            outfile.write('proximal=model_.ProximalDetails(\n')
            self.proximal.exportLiteral(outfile, level, name_='proximal')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.distal is not None:
            showIndent(outfile, level)
            outfile.write('distal=model_.DistalDetails(\n')
            self.distal.exportLiteral(outfile, level, name_='distal')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an XML element node and its children."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the 'variable' and 'metric' XML attributes."""
        value = find_attr_value_('variable', node)
        if value is not None and 'variable' not in already_processed:
            already_processed.append('variable')
            self.variable = value
        value = find_attr_value_('metric', node)
        if value is not None and 'metric' not in already_processed:
            already_processed.append('metric')
            self.metric = value
            self.validate_Metric(self.metric) # validate type Metric
        super(InhomogeneousParam, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on child tag name and attach the parsed child object."""
        if nodeName_ == 'proximal':
            obj_ = ProximalDetails.factory()
            obj_.build(child_)
            self.set_proximal(obj_)
        elif nodeName_ == 'distal':
            obj_ = DistalDetails.factory()
            obj_.build(child_)
            self.set_distal(obj_)
        super(InhomogeneousParam, self).buildChildren(child_, node, nodeName_, True)
# end class InhomogeneousParam
class SegmentGroup(Base):
    """A named group of segments, defined by member/include/path/subTree
    children plus optional inhomogeneousParam definitions.

    Generated XML-binding class; all five children are repeatable lists.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, member=None, include=None, path=None, subTree=None, inhomogeneousParam=None):
        super(SegmentGroup, self).__init__(id, neuroLexId, )
        # Each child collection defaults to a fresh list (never a shared default).
        if member is None:
            self.member = []
        else:
            self.member = member
        if include is None:
            self.include = []
        else:
            self.include = include
        if path is None:
            self.path = []
        else:
            self.path = path
        if subTree is None:
            self.subTree = []
        else:
            self.subTree = subTree
        if inhomogeneousParam is None:
            self.inhomogeneousParam = []
        else:
            self.inhomogeneousParam = inhomogeneousParam
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if SegmentGroup.subclass:
            return SegmentGroup.subclass(*args_, **kwargs_)
        else:
            return SegmentGroup(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated list accessors (get/set/add/insert) for each child collection.
    def get_member(self): return self.member
    def set_member(self, member): self.member = member
    def add_member(self, value): self.member.append(value)
    def insert_member(self, index, value): self.member[index] = value
    def get_include(self): return self.include
    def set_include(self, include): self.include = include
    def add_include(self, value): self.include.append(value)
    def insert_include(self, index, value): self.include[index] = value
    def get_path(self): return self.path
    def set_path(self, path): self.path = path
    def add_path(self, value): self.path.append(value)
    def insert_path(self, index, value): self.path[index] = value
    def get_subTree(self): return self.subTree
    def set_subTree(self, subTree): self.subTree = subTree
    def add_subTree(self, value): self.subTree.append(value)
    def insert_subTree(self, index, value): self.subTree[index] = value
    def get_inhomogeneousParam(self): return self.inhomogeneousParam
    def set_inhomogeneousParam(self, inhomogeneousParam): self.inhomogeneousParam = inhomogeneousParam
    def add_inhomogeneousParam(self, value): self.inhomogeneousParam.append(value)
    def insert_inhomogeneousParam(self, index, value): self.inhomogeneousParam[index] = value
    def export(self, outfile, level, namespace_='', name_='SegmentGroup', namespacedef_=''):
        """Write this element (tag, attributes, children) as XML to outfile."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='SegmentGroup')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='SegmentGroup'):
        """Write XML attributes; this class adds none beyond the superclass's."""
        super(SegmentGroup, self).exportAttributes(outfile, level, already_processed, namespace_, name_='SegmentGroup')
    def exportChildren(self, outfile, level, namespace_='', name_='SegmentGroup', fromsubclass_=False):
        """Write every element of each child list as XML, in schema order."""
        super(SegmentGroup, self).exportChildren(outfile, level, namespace_, name_, True)
        for member_ in self.member:
            member_.export(outfile, level, namespace_, name_='member')
        for include_ in self.include:
            include_.export(outfile, level, namespace_, name_='include')
        for path_ in self.path:
            path_.export(outfile, level, namespace_, name_='path')
        for subTree_ in self.subTree:
            subTree_.export(outfile, level, namespace_, name_='subTree')
        for inhomogeneousParam_ in self.inhomogeneousParam:
            inhomogeneousParam_.export(outfile, level, namespace_, name_='inhomogeneousParam')
    def hasContent_(self):
        """Return True when any child list is non-empty or superclass has content."""
        if (
            self.member or
            self.include or
            self.path or
            self.subTree or
            self.inhomogeneousParam or
            super(SegmentGroup, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='SegmentGroup'):
        """Write a Python-literal representation of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write attributes as literals; this class adds none of its own."""
        super(SegmentGroup, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write each child list as a Python-literal list of constructors."""
        super(SegmentGroup, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('member=[\n')
        level += 1
        for member_ in self.member:
            showIndent(outfile, level)
            outfile.write('model_.Member(\n')
            member_.exportLiteral(outfile, level, name_='Member')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('include=[\n')
        level += 1
        for include_ in self.include:
            showIndent(outfile, level)
            outfile.write('model_.Include(\n')
            include_.exportLiteral(outfile, level, name_='Include')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('path=[\n')
        level += 1
        for path_ in self.path:
            showIndent(outfile, level)
            outfile.write('model_.Path(\n')
            path_.exportLiteral(outfile, level, name_='Path')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('subTree=[\n')
        level += 1
        for subTree_ in self.subTree:
            showIndent(outfile, level)
            outfile.write('model_.SubTree(\n')
            subTree_.exportLiteral(outfile, level, name_='SubTree')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('inhomogeneousParam=[\n')
        level += 1
        for inhomogeneousParam_ in self.inhomogeneousParam:
            showIndent(outfile, level)
            outfile.write('model_.InhomogeneousParam(\n')
            inhomogeneousParam_.exportLiteral(outfile, level, name_='InhomogeneousParam')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an XML element node and its children."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; this class defines none beyond the superclass's."""
        super(SegmentGroup, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on child tag name and append the parsed child object."""
        if nodeName_ == 'member':
            obj_ = Member.factory()
            obj_.build(child_)
            self.member.append(obj_)
        elif nodeName_ == 'include':
            obj_ = Include.factory()
            obj_.build(child_)
            self.include.append(obj_)
        elif nodeName_ == 'path':
            obj_ = Path.factory()
            obj_.build(child_)
            self.path.append(obj_)
        elif nodeName_ == 'subTree':
            obj_ = SubTree.factory()
            obj_.build(child_)
            self.subTree.append(obj_)
        elif nodeName_ == 'inhomogeneousParam':
            obj_ = InhomogeneousParam.factory()
            obj_.build(child_)
            self.inhomogeneousParam.append(obj_)
        super(SegmentGroup, self).buildChildren(child_, node, nodeName_, True)
# end class SegmentGroup
class Segment(Base):
    """A single segment of a cell morphology, with an optional name attribute
    and optional parent / proximal / distal child elements.

    Generated XML-binding class.
    """
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, name=None, parent=None, proximal=None, distal=None):
        super(Segment, self).__init__(id, neuroLexId, )
        # 'name' is an XML attribute; parent/proximal/distal are child elements.
        self.name = _cast(None, name)
        self.parent = parent
        self.proximal = proximal
        self.distal = distal
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if Segment.subclass:
            return Segment.subclass(*args_, **kwargs_)
        else:
            return Segment(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_parent(self): return self.parent
    def set_parent(self, parent): self.parent = parent
    def get_proximal(self): return self.proximal
    def set_proximal(self, proximal): self.proximal = proximal
    def get_distal(self): return self.distal
    def set_distal(self, distal): self.distal = distal
    def get_name(self): return self.name
    def set_name(self, name): self.name = name
    def export(self, outfile, level, namespace_='', name_='Segment', namespacedef_=''):
        """Write this element (tag, attributes, children) as XML to outfile."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Segment')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Segment'):
        """Write the 'name' XML attribute when set."""
        super(Segment, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Segment')
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Segment', fromsubclass_=False):
        """Write the parent/proximal/distal child elements when present."""
        super(Segment, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.parent is not None:
            self.parent.export(outfile, level, namespace_, name_='parent')
        if self.proximal is not None:
            self.proximal.export(outfile, level, namespace_, name_='proximal')
        if self.distal is not None:
            self.distal.export(outfile, level, namespace_, name_='distal', )
    def hasContent_(self):
        """Return True when any child element (or superclass content) is present."""
        if (
            self.parent is not None or
            self.proximal is not None or
            self.distal is not None or
            super(Segment, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Segment'):
        """Write a Python-literal representation of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write the 'name' attribute as a keyword literal."""
        if self.name is not None and 'name' not in already_processed:
            already_processed.append('name')
            showIndent(outfile, level)
            outfile.write('name = "%s",\n' % (self.name,))
        super(Segment, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the children as model_.<Class>( ... ) literals."""
        super(Segment, self).exportLiteralChildren(outfile, level, name_)
        if self.parent is not None:
            showIndent(outfile, level)
            outfile.write('parent=model_.SegmentParent(\n')
            self.parent.exportLiteral(outfile, level, name_='parent')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.proximal is not None:
            showIndent(outfile, level)
            outfile.write('proximal=model_.Point3DWithDiam(\n')
            self.proximal.exportLiteral(outfile, level, name_='proximal')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.distal is not None:
            showIndent(outfile, level)
            outfile.write('distal=model_.Point3DWithDiam(\n')
            self.distal.exportLiteral(outfile, level, name_='distal')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an XML element node and its children."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the 'name' XML attribute."""
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.append('name')
            self.name = value
        super(Segment, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on child tag name and attach the parsed child object."""
        if nodeName_ == 'parent':
            obj_ = SegmentParent.factory()
            obj_.build(child_)
            self.set_parent(obj_)
        elif nodeName_ == 'proximal':
            obj_ = Point3DWithDiam.factory()
            obj_.build(child_)
            self.set_proximal(obj_)
        elif nodeName_ == 'distal':
            obj_ = Point3DWithDiam.factory()
            obj_.build(child_)
            self.set_distal(obj_)
        super(Segment, self).buildChildren(child_, node, nodeName_, True)
# end class Segment
class Morphology(Standalone):
    """Standalone element which is usually inside a single cell, but could
    be outside and referenced by id."""
    # Generated XML-binding class: holds repeatable segment and segmentGroup
    # child lists.
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, segment=None, segmentGroup=None):
        super(Morphology, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        # Each child collection defaults to a fresh list (never a shared default).
        if segment is None:
            self.segment = []
        else:
            self.segment = segment
        if segmentGroup is None:
            self.segmentGroup = []
        else:
            self.segmentGroup = segmentGroup
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if Morphology.subclass:
            return Morphology.subclass(*args_, **kwargs_)
        else:
            return Morphology(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated list accessors (get/set/add/insert) for each child collection.
    def get_segment(self): return self.segment
    def set_segment(self, segment): self.segment = segment
    def add_segment(self, value): self.segment.append(value)
    def insert_segment(self, index, value): self.segment[index] = value
    def get_segmentGroup(self): return self.segmentGroup
    def set_segmentGroup(self, segmentGroup): self.segmentGroup = segmentGroup
    def add_segmentGroup(self, value): self.segmentGroup.append(value)
    def insert_segmentGroup(self, index, value): self.segmentGroup[index] = value
    def export(self, outfile, level, namespace_='', name_='Morphology', namespacedef_=''):
        """Write this element (tag, attributes, children) as XML to outfile."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Morphology')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            # No children: emit a self-closing tag.
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Morphology'):
        """Write XML attributes; this class adds none beyond the superclass's."""
        super(Morphology, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Morphology')
    def exportChildren(self, outfile, level, namespace_='', name_='Morphology', fromsubclass_=False):
        """Write every element of each child list as XML, in schema order."""
        super(Morphology, self).exportChildren(outfile, level, namespace_, name_, True)
        for segment_ in self.segment:
            segment_.export(outfile, level, namespace_, name_='segment')
        for segmentGroup_ in self.segmentGroup:
            segmentGroup_.export(outfile, level, namespace_, name_='segmentGroup')
    def hasContent_(self):
        """Return True when any child list is non-empty or superclass has content."""
        if (
            self.segment or
            self.segmentGroup or
            super(Morphology, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Morphology'):
        """Write a Python-literal representation of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write attributes as literals; this class adds none of its own."""
        super(Morphology, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write each child list as a Python-literal list of constructors."""
        super(Morphology, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('segment=[\n')
        level += 1
        for segment_ in self.segment:
            showIndent(outfile, level)
            outfile.write('model_.Segment(\n')
            segment_.exportLiteral(outfile, level, name_='Segment')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('segmentGroup=[\n')
        level += 1
        for segmentGroup_ in self.segmentGroup:
            showIndent(outfile, level)
            outfile.write('model_.SegmentGroup(\n')
            segmentGroup_.exportLiteral(outfile, level, name_='SegmentGroup')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an XML element node and its children."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; this class defines none beyond the superclass's."""
        super(Morphology, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch on child tag name and append the parsed child object."""
        if nodeName_ == 'segment':
            obj_ = Segment.factory()
            obj_.build(child_)
            self.segment.append(obj_)
        elif nodeName_ == 'segmentGroup':
            obj_ = SegmentGroup.factory()
            obj_.build(child_)
            self.segmentGroup.append(obj_)
        super(Morphology, self).buildChildren(child_, node, nodeName_, True)
# end class Morphology
class AbstractCell(Standalone):
    """Base type for cell models: adds only an xsi:type extension marker on
    top of the Standalone data.

    Generated XML-binding class.
    """
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, extensiontype_=None):
        super(AbstractCell, self).__init__(id, neuroLexId, metaid, notes, annotation, extensiontype_, )
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        target_cls = AbstractCell.subclass
        if not target_cls:
            target_cls = AbstractCell
        return target_cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='AbstractCell', namespacedef_=''):
        """Write this element (tag, attributes, children) as XML to outfile."""
        showIndent(outfile, level)
        if namespacedef_:
            outfile.write('<%s%s %s' % (namespace_, name_, namespacedef_))
        else:
            outfile.write('<%s%s' % (namespace_, name_))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='AbstractCell')
        if not self.hasContent_():
            # No children: emit a self-closing tag.
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='AbstractCell'):
        """Write superclass attributes plus the xsi:type marker when set."""
        super(AbstractCell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='AbstractCell')
        if self.extensiontype_ is None or 'xsi:type' in already_processed:
            return
        already_processed.append('xsi:type')
        outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
        outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='AbstractCell', fromsubclass_=False):
        """This class adds no children of its own; delegate to the superclass."""
        super(AbstractCell, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        """Return True when the superclass has content (this class adds none)."""
        return True if super(AbstractCell, self).hasContent_() else False
    def exportLiteral(self, outfile, level, name_='AbstractCell'):
        """Write a Python-literal representation of this element."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write attributes as literals; this class adds none of its own."""
        super(AbstractCell, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write children as literals; this class adds none of its own."""
        super(AbstractCell, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an XML element node and its children."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            self.buildChildren(child, node, Tag_pattern_.match(child.tag).groups()[-1])
    def buildAttributes(self, node, attrs, already_processed):
        """Capture the xsi:type attribute, then delegate to the superclass."""
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
        super(AbstractCell, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """This class adds no children of its own; delegate to the superclass."""
        super(AbstractCell, self).buildChildren(child_, node, nodeName_, True)
# end class AbstractCell
class ConductanceBasedSynapse(Standalone):
    """A synapse with a reversal potential (erev) and a baseline
    conductance (gbase); generated from the NeuroML 2 schema.

    Supports substitution via xsi:type (``extensiontype_``) for derived
    schema types.
    """
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, erev=None, gbase=None, extensiontype_=None):
        super(ConductanceBasedSynapse, self).__init__(id, neuroLexId, metaid, notes, annotation, extensiontype_, )
        self.erev = _cast(None, erev)
        self.gbase = _cast(None, gbase)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Create an instance, honouring a registered subclass override."""
        if ConductanceBasedSynapse.subclass:
            return ConductanceBasedSynapse.subclass(*args_, **kwargs_)
        else:
            return ConductanceBasedSynapse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_erev(self): return self.erev
    def set_erev(self, erev): self.erev = erev
    def validate_Nml2Quantity_voltage(self, value):
        # Validate type Nml2Quantity_voltage, a restriction on xs:string.
        pass
    def get_gbase(self): return self.gbase
    def set_gbase(self, gbase): self.gbase = gbase
    def validate_Nml2Quantity_conductance(self, value):
        # Validate type Nml2Quantity_conductance, a restriction on xs:string.
        pass
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='ConductanceBasedSynapse', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConductanceBasedSynapse')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ConductanceBasedSynapse'):
        """Write erev, gbase and (when extended) the xsi:type attribute."""
        super(ConductanceBasedSynapse, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ConductanceBasedSynapse')
        if self.erev is not None and 'erev' not in already_processed:
            already_processed.append('erev')
            outfile.write(' erev=%s' % (quote_attrib(self.erev), ))
        if self.gbase is not None and 'gbase' not in already_processed:
            already_processed.append('gbase')
            outfile.write(' gbase=%s' % (quote_attrib(self.gbase), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='ConductanceBasedSynapse', fromsubclass_=False):
        """No children of its own; delegate to the superclass."""
        super(ConductanceBasedSynapse, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        """True when any child content must be serialized."""
        # bool() preserves the strict True/False of the generated original.
        return bool(super(ConductanceBasedSynapse, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='ConductanceBasedSynapse'):
        """Write this element as Python literal constructor text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write erev/gbase in literal form, then the superclass attributes."""
        if self.erev is not None and 'erev' not in already_processed:
            already_processed.append('erev')
            showIndent(outfile, level)
            outfile.write('erev = "%s",\n' % (self.erev,))
        if self.gbase is not None and 'gbase' not in already_processed:
            already_processed.append('gbase')
            showIndent(outfile, level)
            outfile.write('gbase = "%s",\n' % (self.gbase,))
        super(ConductanceBasedSynapse, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """No children of its own; delegate to the superclass."""
        super(ConductanceBasedSynapse, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read erev, gbase and xsi:type, then defer to the superclass."""
        value = find_attr_value_('erev', node)
        if value is not None and 'erev' not in already_processed:
            already_processed.append('erev')
            self.erev = value
            self.validate_Nml2Quantity_voltage(self.erev)    # validate type Nml2Quantity_voltage
        value = find_attr_value_('gbase', node)
        if value is not None and 'gbase' not in already_processed:
            already_processed.append('gbase')
            self.gbase = value
            self.validate_Nml2Quantity_conductance(self.gbase)    # validate type Nml2Quantity_conductance
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
        super(ConductanceBasedSynapse, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No children of its own; delegate to the superclass.
        (The generator's dead trailing `pass` was removed.)"""
        super(ConductanceBasedSynapse, self).buildChildren(child_, node, nodeName_, True)
# end class ConductanceBasedSynapse
class Gate(Base):
    """A channel gate with an instance count, a gate type, and arbitrary
    wildcard child elements (``anytypeobjs_``); generated from the
    NeuroML 2 schema."""
    subclass = None
    superclass = Base
    def __init__(self, id=None, neuroLexId=None, instances=1, type_=None, anytypeobjs_=None):
        super(Gate, self).__init__(id, neuroLexId, )
        self.instances = _cast(int, instances)
        self.type_ = _cast(None, type_)
        # None default avoids the shared-mutable-default pitfall.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        """Create an instance, honouring a registered subclass override."""
        if Gate.subclass:
            return Gate.subclass(*args_, **kwargs_)
        else:
            return Gate(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUGFIX: the generated original assigned to self._anytypeobjs_ (leading
    # underscore), an attribute that never exists, so calling this always
    # raised AttributeError; it now updates the real list.  Note this
    # overwrites the item at *index*, it does not shift like list.insert.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_instances(self): return self.instances
    def set_instances(self, instances): self.instances = instances
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def validate_gateTypes(self, value):
        # Validate type gateTypes, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='Gate', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Gate')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Gate'):
        """Write the instances and type attributes after the superclass's."""
        super(Gate, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Gate')
        if self.instances is not None and 'instances' not in already_processed:
            already_processed.append('instances')
            outfile.write(' instances="%s"' % self.gds_format_integer(self.instances, input_name='instances'))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            outfile.write(' type=%s' % (quote_attrib(self.type_), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Gate', fromsubclass_=False):
        """Export superclass children, then every wildcard child object."""
        super(Gate, self).exportChildren(outfile, level, namespace_, name_, True)
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        """True when any child content must be serialized."""
        # bool() preserves the strict True/False of the generated original.
        return bool(self.anytypeobjs_ or super(Gate, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='Gate'):
        """Write this element as Python literal constructor text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write own attributes in literal form, then the superclass's."""
        if self.instances is not None and 'instances' not in already_processed:
            already_processed.append('instances')
            showIndent(outfile, level)
            outfile.write('instances = %d,\n' % (self.instances,))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            showIndent(outfile, level)
            outfile.write('type_ = "%s",\n' % (self.type_,))
        super(Gate, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write wildcard children in literal form after the superclass's."""
        super(Gate, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read instances and type attributes, then defer to the superclass."""
        value = find_attr_value_('instances', node)
        if value is not None and 'instances' not in already_processed:
            already_processed.append('instances')
            try:
                self.instances = int(value)
            except ValueError as exp:
                # 'except ... as' replaces the Python-2-only comma syntax;
                # it is valid on Python 2.6+ and Python 3.
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.append('type')
            self.type_ = value
            self.validate_gateTypes(self.type_)    # validate type gateTypes
        super(Gate, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Collect every child element as a wildcard (anyType) object."""
        obj_ = self.gds_build_any(child_, 'Gate')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
        super(Gate, self).buildChildren(child_, node, nodeName_, True)
# end class Gate
class IonChannel(Standalone):
    """An ion channel with a conductance, channel type and ion species,
    plus two lists of gates (``gate`` and ``gateHH``, both Gate objects);
    generated from the NeuroML 2 schema."""
    subclass = None
    superclass = Standalone
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, conductance=None, type_=None, species=None, gate=None, gateHH=None):
        super(IonChannel, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.conductance = _cast(None, conductance)
        self.type_ = _cast(None, type_)
        self.species = _cast(None, species)
        # None defaults avoid the shared-mutable-default pitfall.
        if gate is None:
            self.gate = []
        else:
            self.gate = gate
        if gateHH is None:
            self.gateHH = []
        else:
            self.gateHH = gateHH
    def factory(*args_, **kwargs_):
        """Create an instance, honouring a registered subclass override."""
        if IonChannel.subclass:
            return IonChannel.subclass(*args_, **kwargs_)
        else:
            return IonChannel(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_gate(self): return self.gate
    def set_gate(self, gate): self.gate = gate
    def add_gate(self, value): self.gate.append(value)
    def insert_gate(self, index, value): self.gate[index] = value
    def get_gateHH(self): return self.gateHH
    def set_gateHH(self, gateHH): self.gateHH = gateHH
    def add_gateHH(self, value): self.gateHH.append(value)
    def insert_gateHH(self, index, value): self.gateHH[index] = value
    def get_conductance(self): return self.conductance
    def set_conductance(self, conductance): self.conductance = conductance
    def validate_Nml2Quantity_conductance(self, value):
        # Validate type Nml2Quantity_conductance, a restriction on xs:string.
        pass
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def validate_channelTypes(self, value):
        # Validate type channelTypes, a restriction on xs:string.
        pass
    def get_species(self): return self.species
    def set_species(self, species): self.species = species
    def validate_NmlId(self, value):
        # Validate type NmlId, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='IonChannel', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='IonChannel')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='IonChannel'):
        """Write conductance, type and species after the superclass's."""
        super(IonChannel, self).exportAttributes(outfile, level, already_processed, namespace_, name_='IonChannel')
        if self.conductance is not None and 'conductance' not in already_processed:
            already_processed.append('conductance')
            outfile.write(' conductance=%s' % (quote_attrib(self.conductance), ))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            outfile.write(' type=%s' % (quote_attrib(self.type_), ))
        if self.species is not None and 'species' not in already_processed:
            already_processed.append('species')
            outfile.write(' species=%s' % (quote_attrib(self.species), ))
    def exportChildren(self, outfile, level, namespace_='', name_='IonChannel', fromsubclass_=False):
        """Export superclass children, then the gate and gateHH lists."""
        super(IonChannel, self).exportChildren(outfile, level, namespace_, name_, True)
        for gate_ in self.gate:
            gate_.export(outfile, level, namespace_, name_='gate')
        for gateHH_ in self.gateHH:
            gateHH_.export(outfile, level, namespace_, name_='gateHH')
    def hasContent_(self):
        """True when a gate list is non-empty or the superclass has content."""
        # bool() preserves the strict True/False of the generated original.
        return bool(
            self.gate or
            self.gateHH or
            super(IonChannel, self).hasContent_()
        )
    def exportLiteral(self, outfile, level, name_='IonChannel'):
        """Write this element as Python literal constructor text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write own attributes in literal form, then the superclass's."""
        if self.conductance is not None and 'conductance' not in already_processed:
            already_processed.append('conductance')
            showIndent(outfile, level)
            outfile.write('conductance = "%s",\n' % (self.conductance,))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            showIndent(outfile, level)
            outfile.write('type_ = "%s",\n' % (self.type_,))
        if self.species is not None and 'species' not in already_processed:
            already_processed.append('species')
            showIndent(outfile, level)
            outfile.write('species = "%s",\n' % (self.species,))
        super(IonChannel, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the gate and gateHH lists in literal form."""
        super(IonChannel, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('gate=[\n')
        level += 1
        for gate_ in self.gate:
            showIndent(outfile, level)
            outfile.write('model_.Gate(\n')
            gate_.exportLiteral(outfile, level, name_='Gate')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('gateHH=[\n')
        level += 1
        for gateHH_ in self.gateHH:
            showIndent(outfile, level)
            outfile.write('model_.Gate(\n')
            gateHH_.exportLiteral(outfile, level, name_='Gate')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read conductance, type and species, then defer to the superclass."""
        value = find_attr_value_('conductance', node)
        if value is not None and 'conductance' not in already_processed:
            already_processed.append('conductance')
            self.conductance = value
            self.validate_Nml2Quantity_conductance(self.conductance)    # validate type Nml2Quantity_conductance
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.append('type')
            self.type_ = value
            self.validate_channelTypes(self.type_)    # validate type channelTypes
        value = find_attr_value_('species', node)
        if value is not None and 'species' not in already_processed:
            already_processed.append('species')
            self.species = value
            self.validate_NmlId(self.species)    # validate type NmlId
        super(IonChannel, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build a Gate for each <gate>/<gateHH> child; defer the rest."""
        if nodeName_ == 'gate':
            obj_ = Gate.factory()
            obj_.build(child_)
            self.gate.append(obj_)
        elif nodeName_ == 'gateHH':
            obj_ = Gate.factory()
            obj_.build(child_)
            self.gateHH.append(obj_)
        super(IonChannel, self).buildChildren(child_, node, nodeName_, True)
# end class IonChannel
class neuroml(Standalone):
subclass = None
superclass = Standalone
def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, include=None, extracellularProperties=None, intracellularProperties=None, morphology=None, ionChannel=None, expOneSynapse=None, expTwoSynapse=None, nmdaSynapse=None, stpSynapse=None, biophysicalProperties=None, cell=None, abstractCell=None, iafTauCell=None, iafCell=None, izhikevichCell=None, adExIaFCell=None, pulseGenerator=None, network=None):
super(neuroml, self).__init__(id, neuroLexId, metaid, notes, annotation, )
if include is None:
self.include = []
else:
self.include = include
if extracellularProperties is None:
self.extracellularProperties = []
else:
self.extracellularProperties = extracellularProperties
if intracellularProperties is None:
self.intracellularProperties = []
else:
self.intracellularProperties = intracellularProperties
if morphology is None:
self.morphology = []
else:
self.morphology = morphology
if ionChannel is None:
self.ionChannel = []
else:
self.ionChannel = ionChannel
if expOneSynapse is None:
self.expOneSynapse = []
else:
self.expOneSynapse = expOneSynapse
if expTwoSynapse is None:
self.expTwoSynapse = []
else:
self.expTwoSynapse = expTwoSynapse
if nmdaSynapse is None:
self.nmdaSynapse = []
else:
self.nmdaSynapse = nmdaSynapse
if stpSynapse is None:
self.stpSynapse = []
else:
self.stpSynapse = stpSynapse
if biophysicalProperties is None:
self.biophysicalProperties = []
else:
self.biophysicalProperties = biophysicalProperties
if cell is None:
self.cell = []
else:
self.cell = cell
if abstractCell is None:
self.abstractCell = []
else:
self.abstractCell = abstractCell
if iafTauCell is None:
self.iafTauCell = []
else:
self.iafTauCell = iafTauCell
if iafCell is None:
self.iafCell = []
else:
self.iafCell = iafCell
if izhikevichCell is None:
self.izhikevichCell = []
else:
self.izhikevichCell = izhikevichCell
if adExIaFCell is None:
self.adExIaFCell = []
else:
self.adExIaFCell = adExIaFCell
if pulseGenerator is None:
self.pulseGenerator = []
else:
self.pulseGenerator = pulseGenerator
if network is None:
self.network = []
else:
self.network = network
def factory(*args_, **kwargs_):
if neuroml.subclass:
return neuroml.subclass(*args_, **kwargs_)
else:
return neuroml(*args_, **kwargs_)
factory = staticmethod(factory)
def get_include(self): return self.include
def set_include(self, include): self.include = include
def add_include(self, value): self.include.append(value)
def insert_include(self, index, value): self.include[index] = value
def get_extracellularProperties(self): return self.extracellularProperties
def set_extracellularProperties(self, extracellularProperties): self.extracellularProperties = extracellularProperties
def add_extracellularProperties(self, value): self.extracellularProperties.append(value)
def insert_extracellularProperties(self, index, value): self.extracellularProperties[index] = value
def get_intracellularProperties(self): return self.intracellularProperties
def set_intracellularProperties(self, intracellularProperties): self.intracellularProperties = intracellularProperties
def add_intracellularProperties(self, value): self.intracellularProperties.append(value)
def insert_intracellularProperties(self, index, value): self.intracellularProperties[index] = value
def get_morphology(self): return self.morphology
def set_morphology(self, morphology): self.morphology = morphology
def add_morphology(self, value): self.morphology.append(value)
def insert_morphology(self, index, value): self.morphology[index] = value
def get_ionChannel(self): return self.ionChannel
def set_ionChannel(self, ionChannel): self.ionChannel = ionChannel
def add_ionChannel(self, value): self.ionChannel.append(value)
def insert_ionChannel(self, index, value): self.ionChannel[index] = value
def get_expOneSynapse(self): return self.expOneSynapse
def set_expOneSynapse(self, expOneSynapse): self.expOneSynapse = expOneSynapse
def add_expOneSynapse(self, value): self.expOneSynapse.append(value)
def insert_expOneSynapse(self, index, value): self.expOneSynapse[index] = value
def get_expTwoSynapse(self): return self.expTwoSynapse
def set_expTwoSynapse(self, expTwoSynapse): self.expTwoSynapse = expTwoSynapse
def add_expTwoSynapse(self, value): self.expTwoSynapse.append(value)
def insert_expTwoSynapse(self, index, value): self.expTwoSynapse[index] = value
def get_nmdaSynapse(self): return self.nmdaSynapse
def set_nmdaSynapse(self, nmdaSynapse): self.nmdaSynapse = nmdaSynapse
def add_nmdaSynapse(self, value): self.nmdaSynapse.append(value)
def insert_nmdaSynapse(self, index, value): self.nmdaSynapse[index] = value
def get_stpSynapse(self): return self.stpSynapse
def set_stpSynapse(self, stpSynapse): self.stpSynapse = stpSynapse
def add_stpSynapse(self, value): self.stpSynapse.append(value)
def insert_stpSynapse(self, index, value): self.stpSynapse[index] = value
def get_biophysicalProperties(self): return self.biophysicalProperties
def set_biophysicalProperties(self, biophysicalProperties): self.biophysicalProperties = biophysicalProperties
def add_biophysicalProperties(self, value): self.biophysicalProperties.append(value)
def insert_biophysicalProperties(self, index, value): self.biophysicalProperties[index] = value
def get_cell(self): return self.cell
def set_cell(self, cell): self.cell = cell
def add_cell(self, value): self.cell.append(value)
def insert_cell(self, index, value): self.cell[index] = value
def get_abstractCell(self): return self.abstractCell
def set_abstractCell(self, abstractCell): self.abstractCell = abstractCell
def add_abstractCell(self, value): self.abstractCell.append(value)
def insert_abstractCell(self, index, value): self.abstractCell[index] = value
def get_iafTauCell(self): return self.iafTauCell
def set_iafTauCell(self, iafTauCell): self.iafTauCell = iafTauCell
def add_iafTauCell(self, value): self.iafTauCell.append(value)
def insert_iafTauCell(self, index, value): self.iafTauCell[index] = value
def get_iafCell(self): return self.iafCell
def set_iafCell(self, iafCell): self.iafCell = iafCell
def add_iafCell(self, value): self.iafCell.append(value)
def insert_iafCell(self, index, value): self.iafCell[index] = value
def get_izhikevichCell(self): return self.izhikevichCell
def set_izhikevichCell(self, izhikevichCell): self.izhikevichCell = izhikevichCell
def add_izhikevichCell(self, value): self.izhikevichCell.append(value)
def insert_izhikevichCell(self, index, value): self.izhikevichCell[index] = value
def get_adExIaFCell(self): return self.adExIaFCell
def set_adExIaFCell(self, adExIaFCell): self.adExIaFCell = adExIaFCell
def add_adExIaFCell(self, value): self.adExIaFCell.append(value)
def insert_adExIaFCell(self, index, value): self.adExIaFCell[index] = value
def get_pulseGenerator(self): return self.pulseGenerator
def set_pulseGenerator(self, pulseGenerator): self.pulseGenerator = pulseGenerator
def add_pulseGenerator(self, value): self.pulseGenerator.append(value)
def insert_pulseGenerator(self, index, value): self.pulseGenerator[index] = value
def get_network(self): return self.network
def set_network(self, network): self.network = network
def add_network(self, value): self.network.append(value)
def insert_network(self, index, value): self.network[index] = value
def export(self, outfile, level, namespace_='', name_='neuroml', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='neuroml')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='neuroml'):
super(neuroml, self).exportAttributes(outfile, level, already_processed, namespace_, name_='neuroml')
def exportChildren(self, outfile, level, namespace_='', name_='neuroml', fromsubclass_=False):
super(neuroml, self).exportChildren(outfile, level, namespace_, name_, True)
for include_ in self.include:
include_.export(outfile, level, namespace_, name_='include')
for extracellularProperties_ in self.extracellularProperties:
extracellularProperties_.export(outfile, level, namespace_, name_='extracellularProperties')
for intracellularProperties_ in self.intracellularProperties:
intracellularProperties_.export(outfile, level, namespace_, name_='intracellularProperties')
for morphology_ in self.morphology:
morphology_.export(outfile, level, namespace_, name_='morphology')
for ionChannel_ in self.ionChannel:
ionChannel_.export(outfile, level, namespace_, name_='ionChannel')
for expOneSynapse_ in self.expOneSynapse:
expOneSynapse_.export(outfile, level, namespace_, name_='expOneSynapse')
for expTwoSynapse_ in self.expTwoSynapse:
expTwoSynapse_.export(outfile, level, namespace_, name_='expTwoSynapse')
for nmdaSynapse_ in self.nmdaSynapse:
nmdaSynapse_.export(outfile, level, namespace_, name_='nmdaSynapse')
for stpSynapse_ in self.stpSynapse:
stpSynapse_.export(outfile, level, namespace_, name_='stpSynapse')
for biophysicalProperties_ in self.biophysicalProperties:
biophysicalProperties_.export(outfile, level, namespace_, name_='biophysicalProperties')
for cell_ in self.cell:
cell_.export(outfile, level, namespace_, name_='cell')
for abstractCell_ in self.abstractCell:
abstractCell_.export(outfile, level, namespace_, name_='abstractCell')
for iafTauCell_ in self.iafTauCell:
iafTauCell_.export(outfile, level, namespace_, name_='iafTauCell')
for iafCell_ in self.iafCell:
iafCell_.export(outfile, level, namespace_, name_='iafCell')
for izhikevichCell_ in self.izhikevichCell:
izhikevichCell_.export(outfile, level, namespace_, name_='izhikevichCell')
for adExIaFCell_ in self.adExIaFCell:
adExIaFCell_.export(outfile, level, namespace_, name_='adExIaFCell')
for pulseGenerator_ in self.pulseGenerator:
pulseGenerator_.export(outfile, level, namespace_, name_='pulseGenerator')
for network_ in self.network:
network_.export(outfile, level, namespace_, name_='network')
def hasContent_(self):
if (
self.include or
self.extracellularProperties or
self.intracellularProperties or
self.morphology or
self.ionChannel or
self.expOneSynapse or
self.expTwoSynapse or
self.nmdaSynapse or
self.stpSynapse or
self.biophysicalProperties or
self.cell or
self.abstractCell or
self.iafTauCell or
self.iafCell or
self.izhikevichCell or
self.adExIaFCell or
self.pulseGenerator or
self.network or
super(neuroml, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='neuroml'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(neuroml, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(neuroml, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('include=[\n')
level += 1
for include_ in self.include:
showIndent(outfile, level)
outfile.write('model_.includeType(\n')
include_.exportLiteral(outfile, level, name_='includeType')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('extracellularProperties=[\n')
level += 1
for extracellularProperties_ in self.extracellularProperties:
showIndent(outfile, level)
outfile.write('model_.ExtracellularProperties(\n')
extracellularProperties_.exportLiteral(outfile, level, name_='ExtracellularProperties')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('intracellularProperties=[\n')
level += 1
for intracellularProperties_ in self.intracellularProperties:
showIndent(outfile, level)
outfile.write('model_.IntracellularProperties(\n')
intracellularProperties_.exportLiteral(outfile, level, name_='IntracellularProperties')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('morphology=[\n')
level += 1
for morphology_ in self.morphology:
showIndent(outfile, level)
outfile.write('model_.Morphology(\n')
morphology_.exportLiteral(outfile, level, name_='Morphology')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('ionChannel=[\n')
level += 1
for ionChannel_ in self.ionChannel:
showIndent(outfile, level)
outfile.write('model_.IonChannel(\n')
ionChannel_.exportLiteral(outfile, level, name_='IonChannel')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('expOneSynapse=[\n')
level += 1
for expOneSynapse_ in self.expOneSynapse:
showIndent(outfile, level)
outfile.write('model_.ExpOneSynapse(\n')
expOneSynapse_.exportLiteral(outfile, level, name_='ExpOneSynapse')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('expTwoSynapse=[\n')
level += 1
for expTwoSynapse_ in self.expTwoSynapse:
showIndent(outfile, level)
outfile.write('model_.ExpTwoSynapse(\n')
expTwoSynapse_.exportLiteral(outfile, level, name_='ExpTwoSynapse')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('nmdaSynapse=[\n')
level += 1
for nmdaSynapse_ in self.nmdaSynapse:
showIndent(outfile, level)
outfile.write('model_.NmdaSynapse(\n')
nmdaSynapse_.exportLiteral(outfile, level, name_='NmdaSynapse')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('stpSynapse=[\n')
level += 1
for stpSynapse_ in self.stpSynapse:
showIndent(outfile, level)
outfile.write('model_.StpSynapse(\n')
stpSynapse_.exportLiteral(outfile, level, name_='StpSynapse')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('biophysicalProperties=[\n')
level += 1
for biophysicalProperties_ in self.biophysicalProperties:
showIndent(outfile, level)
outfile.write('model_.BiophysicalProperties(\n')
biophysicalProperties_.exportLiteral(outfile, level, name_='BiophysicalProperties')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('cell=[\n')
level += 1
for cell_ in self.cell:
showIndent(outfile, level)
outfile.write('model_.Cell(\n')
cell_.exportLiteral(outfile, level, name_='Cell')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('abstractCell=[\n')
level += 1
for abstractCell_ in self.abstractCell:
showIndent(outfile, level)
outfile.write('model_.AbstractCell(\n')
abstractCell_.exportLiteral(outfile, level, name_='AbstractCell')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('iafTauCell=[\n')
level += 1
for iafTauCell_ in self.iafTauCell:
showIndent(outfile, level)
outfile.write('model_.IaFTauCell(\n')
iafTauCell_.exportLiteral(outfile, level, name_='IaFTauCell')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('iafCell=[\n')
level += 1
for iafCell_ in self.iafCell:
showIndent(outfile, level)
outfile.write('model_.IaFCell(\n')
iafCell_.exportLiteral(outfile, level, name_='IaFCell')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('izhikevichCell=[\n')
level += 1
for izhikevichCell_ in self.izhikevichCell:
showIndent(outfile, level)
outfile.write('model_.IzhikevichCell(\n')
izhikevichCell_.exportLiteral(outfile, level, name_='IzhikevichCell')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('adExIaFCell=[\n')
level += 1
for adExIaFCell_ in self.adExIaFCell:
showIndent(outfile, level)
outfile.write('model_.AdExIaFCell(\n')
adExIaFCell_.exportLiteral(outfile, level, name_='AdExIaFCell')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('pulseGenerator=[\n')
level += 1
for pulseGenerator_ in self.pulseGenerator:
showIndent(outfile, level)
outfile.write('model_.PulseGenerator(\n')
pulseGenerator_.exportLiteral(outfile, level, name_='PulseGenerator')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('network=[\n')
level += 1
for network_ in self.network:
showIndent(outfile, level)
outfile.write('model_.Network(\n')
network_.exportLiteral(outfile, level, name_='Network')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # The neuroml element declares no XML attributes of its own beyond
        # those of its base class; delegate parsing entirely to the
        # superclass.  `already_processed` collects attribute names so the
        # chain does not handle the same attribute twice.
        super(neuroml, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'include':
obj_ = includeType.factory()
obj_.build(child_)
self.include.append(obj_)
elif nodeName_ == 'extracellularProperties':
obj_ = ExtracellularProperties.factory()
obj_.build(child_)
self.extracellularProperties.append(obj_)
elif nodeName_ == 'intracellularProperties':
obj_ = IntracellularProperties.factory()
obj_.build(child_)
self.intracellularProperties.append(obj_)
elif nodeName_ == 'morphology':
obj_ = Morphology.factory()
obj_.build(child_)
self.morphology.append(obj_)
elif nodeName_ == 'ionChannel':
obj_ = IonChannel.factory()
obj_.build(child_)
self.ionChannel.append(obj_)
elif nodeName_ == 'expOneSynapse':
obj_ = ExpOneSynapse.factory()
obj_.build(child_)
self.expOneSynapse.append(obj_)
elif nodeName_ == 'expTwoSynapse':
class_obj_ = self.get_class_obj_(child_, ExpTwoSynapse)
obj_ = class_obj_.factory()
obj_.build(child_)
self.expTwoSynapse.append(obj_)
elif nodeName_ == 'nmdaSynapse':
obj_ = NmdaSynapse.factory()
obj_.build(child_)
self.nmdaSynapse.append(obj_)
elif nodeName_ == 'stpSynapse':
obj_ = StpSynapse.factory()
obj_.build(child_)
self.stpSynapse.append(obj_)
elif nodeName_ == 'biophysicalProperties':
obj_ = BiophysicalProperties.factory()
obj_.build(child_)
self.biophysicalProperties.append(obj_)
elif nodeName_ == 'cell':
obj_ = Cell.factory()
obj_.build(child_)
self.cell.append(obj_)
elif nodeName_ == 'abstractCell':
class_obj_ = self.get_class_obj_(child_, AbstractCell)
obj_ = class_obj_.factory()
obj_.build(child_)
self.abstractCell.append(obj_)
elif nodeName_ == 'iafTauCell':
obj_ = IaFTauCell.factory()
obj_.build(child_)
self.iafTauCell.append(obj_)
elif nodeName_ == 'iafCell':
obj_ = IaFCell.factory()
obj_.build(child_)
self.iafCell.append(obj_)
elif nodeName_ == 'izhikevichCell':
obj_ = IzhikevichCell.factory()
obj_.build(child_)
self.izhikevichCell.append(obj_)
elif nodeName_ == 'adExIaFCell':
obj_ = AdExIaFCell.factory()
obj_.build(child_)
self.adExIaFCell.append(obj_)
elif nodeName_ == 'pulseGenerator':
obj_ = PulseGenerator.factory()
obj_.build(child_)
self.pulseGenerator.append(obj_)
elif nodeName_ == 'network':
obj_ = Network.factory()
obj_.build(child_)
self.network.append(obj_)
super(neuroml, self).buildChildren(child_, node, nodeName_, True)
# end class neuroml
class Cell(AbstractCell):
    """A cell with optional inline morphology and biophysicalProperties
    child elements.

    The ``*_attr`` attributes should only be used when the corresponding
    element lives *outside* the cell: ``morphology_attr`` points to the id
    of an external morphology, and ``biophysicalProperties_attr`` points to
    the id of an external biophysicalProperties element.
    """
    # generateDS convention: assign a class to `subclass` to make factory()
    # build that class instead of Cell.
    subclass = None
    superclass = AbstractCell
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, biophysicalProperties_attr=None, morphology_attr=None, morphology=None, biophysicalProperties=None):
        super(Cell, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        # XML attributes (id references to external elements).
        self.biophysicalProperties_attr = _cast(None, biophysicalProperties_attr)
        self.morphology_attr = _cast(None, morphology_attr)
        # Optional inline child elements.
        self.morphology = morphology
        self.biophysicalProperties = biophysicalProperties
    def factory(*args_, **kwargs_):
        # Instantiate Cell, or the registered subclass if one is set.
        if Cell.subclass:
            return Cell.subclass(*args_, **kwargs_)
        else:
            return Cell(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for child elements and attributes.
    def get_morphology(self): return self.morphology
    def set_morphology(self, morphology): self.morphology = morphology
    def get_biophysicalProperties(self): return self.biophysicalProperties
    def set_biophysicalProperties(self, biophysicalProperties): self.biophysicalProperties = biophysicalProperties
    def get_biophysicalProperties_attr(self): return self.biophysicalProperties_attr
    def set_biophysicalProperties_attr(self, biophysicalProperties_attr): self.biophysicalProperties_attr = biophysicalProperties_attr
    def get_morphology_attr(self): return self.morphology_attr
    def set_morphology_attr(self, morphology_attr): self.morphology_attr = morphology_attr
    def export(self, outfile, level, namespace_='', name_='Cell', namespacedef_=''):
        # Write this element as XML: self-closing when it has no content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Cell')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='Cell'):
        # Base-class attributes first, then this class's own attributes;
        # `already_processed` prevents emitting an attribute twice.
        super(Cell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Cell')
        if self.biophysicalProperties_attr is not None and 'biophysicalProperties_attr' not in already_processed:
            already_processed.append('biophysicalProperties_attr')
            outfile.write(' biophysicalProperties_attr=%s' % (self.gds_format_string(quote_attrib(self.biophysicalProperties_attr).encode(ExternalEncoding), input_name='biophysicalProperties_attr'), ))
        if self.morphology_attr is not None and 'morphology_attr' not in already_processed:
            already_processed.append('morphology_attr')
            outfile.write(' morphology_attr=%s' % (self.gds_format_string(quote_attrib(self.morphology_attr).encode(ExternalEncoding), input_name='morphology_attr'), ))
    def exportChildren(self, outfile, level, namespace_='', name_='Cell', fromsubclass_=False):
        super(Cell, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.morphology is not None:
            self.morphology.export(outfile, level, namespace_, name_='morphology')
        if self.biophysicalProperties is not None:
            self.biophysicalProperties.export(outfile, level, namespace_, name_='biophysicalProperties')
    def hasContent_(self):
        # True when this element would produce child output (own children
        # or anything contributed by the base class).
        if (
            self.morphology is not None or
            self.biophysicalProperties is not None or
            super(Cell, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Cell'):
        # Write this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.biophysicalProperties_attr is not None and 'biophysicalProperties_attr' not in already_processed:
            already_processed.append('biophysicalProperties_attr')
            showIndent(outfile, level)
            outfile.write('biophysicalProperties_attr = "%s",\n' % (self.biophysicalProperties_attr,))
        if self.morphology_attr is not None and 'morphology_attr' not in already_processed:
            already_processed.append('morphology_attr')
            showIndent(outfile, level)
            outfile.write('morphology_attr = "%s",\n' % (self.morphology_attr,))
        super(Cell, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Cell, self).exportLiteralChildren(outfile, level, name_)
        if self.morphology is not None:
            showIndent(outfile, level)
            outfile.write('morphology=model_.Morphology(\n')
            self.morphology.exportLiteral(outfile, level, name_='morphology')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.biophysicalProperties is not None:
            showIndent(outfile, level)
            outfile.write('biophysicalProperties=model_.BiophysicalProperties(\n')
            self.biophysicalProperties.exportLiteral(outfile, level, name_='biophysicalProperties')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        # Populate this instance from an ElementTree element.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('biophysicalProperties_attr', node)
        if value is not None and 'biophysicalProperties_attr' not in already_processed:
            already_processed.append('biophysicalProperties_attr')
            self.biophysicalProperties_attr = value
        value = find_attr_value_('morphology_attr', node)
        if value is not None and 'morphology_attr' not in already_processed:
            already_processed.append('morphology_attr')
            self.morphology_attr = value
        super(Cell, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'morphology':
            obj_ = Morphology.factory()
            obj_.build(child_)
            self.set_morphology(obj_)
        elif nodeName_ == 'biophysicalProperties':
            obj_ = BiophysicalProperties.factory()
            obj_.build(child_)
            self.set_biophysicalProperties(obj_)
        super(Cell, self).buildChildren(child_, node, nodeName_, True)
# end class Cell
class AdExIaFCell(AbstractCell):
    """Generated binding for the adExIaFCell element.

    All model parameters are held as strings of the Nml2Quantity_* XSD
    types; the ``validate_*`` hooks are no-op stubs in this generated code.
    """
    # generateDS convention: assign a class to `subclass` to make factory()
    # build that class instead of AdExIaFCell.
    subclass = None
    superclass = AbstractCell
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, reset=None, EL=None, C=None, b=None, Idel=None, Idur=None, VT=None, delT=None, a=None, thresh=None, gL=None, Iamp=None, tauw=None):
        super(AdExIaFCell, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.reset = _cast(None, reset)
        self.EL = _cast(None, EL)
        self.C = _cast(None, C)
        self.b = _cast(None, b)
        self.Idel = _cast(None, Idel)
        self.Idur = _cast(None, Idur)
        self.VT = _cast(None, VT)
        self.delT = _cast(None, delT)
        self.a = _cast(None, a)
        self.thresh = _cast(None, thresh)
        self.gL = _cast(None, gL)
        self.Iamp = _cast(None, Iamp)
        self.tauw = _cast(None, tauw)
        pass
    def factory(*args_, **kwargs_):
        # Instantiate AdExIaFCell, or the registered subclass if one is set.
        if AdExIaFCell.subclass:
            return AdExIaFCell.subclass(*args_, **kwargs_)
        else:
            return AdExIaFCell(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors; the interleaved validate_* methods are stubs.
    def get_reset(self): return self.reset
    def set_reset(self, reset): self.reset = reset
    def validate_Nml2Quantity_voltage(self, value):
        # Validate type Nml2Quantity_voltage, a restriction on xs:string.
        pass
    def get_EL(self): return self.EL
    def set_EL(self, EL): self.EL = EL
    def get_C(self): return self.C
    def set_C(self, C): self.C = C
    def validate_Nml2Quantity_capacitance(self, value):
        # Validate type Nml2Quantity_capacitance, a restriction on xs:string.
        pass
    def get_b(self): return self.b
    def set_b(self, b): self.b = b
    def validate_Nml2Quantity_current(self, value):
        # Validate type Nml2Quantity_current, a restriction on xs:string.
        pass
    def get_Idel(self): return self.Idel
    def set_Idel(self, Idel): self.Idel = Idel
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_Idur(self): return self.Idur
    def set_Idur(self, Idur): self.Idur = Idur
    def get_VT(self): return self.VT
    def set_VT(self, VT): self.VT = VT
    def get_delT(self): return self.delT
    def set_delT(self, delT): self.delT = delT
    def get_a(self): return self.a
    def set_a(self, a): self.a = a
    def validate_Nml2Quantity_conductance(self, value):
        # Validate type Nml2Quantity_conductance, a restriction on xs:string.
        pass
    def get_thresh(self): return self.thresh
    def set_thresh(self, thresh): self.thresh = thresh
    def get_gL(self): return self.gL
    def set_gL(self, gL): self.gL = gL
    def get_Iamp(self): return self.Iamp
    def set_Iamp(self, Iamp): self.Iamp = Iamp
    def get_tauw(self): return self.tauw
    def set_tauw(self, tauw): self.tauw = tauw
    def export(self, outfile, level, namespace_='', name_='AdExIaFCell', namespacedef_=''):
        # Write this element as XML: self-closing when it has no content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='AdExIaFCell')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='AdExIaFCell'):
        # Base-class attributes first, then each own attribute that is set
        # and not already emitted.
        super(AdExIaFCell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='AdExIaFCell')
        if self.reset is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            outfile.write(' reset=%s' % (quote_attrib(self.reset), ))
        if self.EL is not None and 'EL' not in already_processed:
            already_processed.append('EL')
            outfile.write(' EL=%s' % (quote_attrib(self.EL), ))
        if self.C is not None and 'C' not in already_processed:
            already_processed.append('C')
            outfile.write(' C=%s' % (quote_attrib(self.C), ))
        if self.b is not None and 'b' not in already_processed:
            already_processed.append('b')
            outfile.write(' b=%s' % (quote_attrib(self.b), ))
        if self.Idel is not None and 'Idel' not in already_processed:
            already_processed.append('Idel')
            outfile.write(' Idel=%s' % (quote_attrib(self.Idel), ))
        if self.Idur is not None and 'Idur' not in already_processed:
            already_processed.append('Idur')
            outfile.write(' Idur=%s' % (quote_attrib(self.Idur), ))
        if self.VT is not None and 'VT' not in already_processed:
            already_processed.append('VT')
            outfile.write(' VT=%s' % (quote_attrib(self.VT), ))
        if self.delT is not None and 'delT' not in already_processed:
            already_processed.append('delT')
            outfile.write(' delT=%s' % (quote_attrib(self.delT), ))
        if self.a is not None and 'a' not in already_processed:
            already_processed.append('a')
            outfile.write(' a=%s' % (quote_attrib(self.a), ))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            outfile.write(' thresh=%s' % (quote_attrib(self.thresh), ))
        if self.gL is not None and 'gL' not in already_processed:
            already_processed.append('gL')
            outfile.write(' gL=%s' % (quote_attrib(self.gL), ))
        if self.Iamp is not None and 'Iamp' not in already_processed:
            already_processed.append('Iamp')
            outfile.write(' Iamp=%s' % (quote_attrib(self.Iamp), ))
        if self.tauw is not None and 'tauw' not in already_processed:
            already_processed.append('tauw')
            outfile.write(' tauw=%s' % (quote_attrib(self.tauw), ))
    def exportChildren(self, outfile, level, namespace_='', name_='AdExIaFCell', fromsubclass_=False):
        # No child elements of its own; defer to the base class.
        super(AdExIaFCell, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(AdExIaFCell, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='AdExIaFCell'):
        # Write this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.reset is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            showIndent(outfile, level)
            outfile.write('reset = "%s",\n' % (self.reset,))
        if self.EL is not None and 'EL' not in already_processed:
            already_processed.append('EL')
            showIndent(outfile, level)
            outfile.write('EL = "%s",\n' % (self.EL,))
        if self.C is not None and 'C' not in already_processed:
            already_processed.append('C')
            showIndent(outfile, level)
            outfile.write('C = "%s",\n' % (self.C,))
        if self.b is not None and 'b' not in already_processed:
            already_processed.append('b')
            showIndent(outfile, level)
            outfile.write('b = "%s",\n' % (self.b,))
        if self.Idel is not None and 'Idel' not in already_processed:
            already_processed.append('Idel')
            showIndent(outfile, level)
            outfile.write('Idel = "%s",\n' % (self.Idel,))
        if self.Idur is not None and 'Idur' not in already_processed:
            already_processed.append('Idur')
            showIndent(outfile, level)
            outfile.write('Idur = "%s",\n' % (self.Idur,))
        if self.VT is not None and 'VT' not in already_processed:
            already_processed.append('VT')
            showIndent(outfile, level)
            outfile.write('VT = "%s",\n' % (self.VT,))
        if self.delT is not None and 'delT' not in already_processed:
            already_processed.append('delT')
            showIndent(outfile, level)
            outfile.write('delT = "%s",\n' % (self.delT,))
        if self.a is not None and 'a' not in already_processed:
            already_processed.append('a')
            showIndent(outfile, level)
            outfile.write('a = "%s",\n' % (self.a,))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            showIndent(outfile, level)
            outfile.write('thresh = "%s",\n' % (self.thresh,))
        if self.gL is not None and 'gL' not in already_processed:
            already_processed.append('gL')
            showIndent(outfile, level)
            outfile.write('gL = "%s",\n' % (self.gL,))
        if self.Iamp is not None and 'Iamp' not in already_processed:
            already_processed.append('Iamp')
            showIndent(outfile, level)
            outfile.write('Iamp = "%s",\n' % (self.Iamp,))
        if self.tauw is not None and 'tauw' not in already_processed:
            already_processed.append('tauw')
            showIndent(outfile, level)
            outfile.write('tauw = "%s",\n' % (self.tauw,))
        super(AdExIaFCell, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(AdExIaFCell, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate this instance from an ElementTree element.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Read each declared attribute from the node, then run its (stub)
        # type validator, then hand the remainder to the base class.
        value = find_attr_value_('reset', node)
        if value is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            self.reset = value
            self.validate_Nml2Quantity_voltage(self.reset) # validate type Nml2Quantity_voltage
        value = find_attr_value_('EL', node)
        if value is not None and 'EL' not in already_processed:
            already_processed.append('EL')
            self.EL = value
            self.validate_Nml2Quantity_voltage(self.EL) # validate type Nml2Quantity_voltage
        value = find_attr_value_('C', node)
        if value is not None and 'C' not in already_processed:
            already_processed.append('C')
            self.C = value
            self.validate_Nml2Quantity_capacitance(self.C) # validate type Nml2Quantity_capacitance
        value = find_attr_value_('b', node)
        if value is not None and 'b' not in already_processed:
            already_processed.append('b')
            self.b = value
            self.validate_Nml2Quantity_current(self.b) # validate type Nml2Quantity_current
        value = find_attr_value_('Idel', node)
        if value is not None and 'Idel' not in already_processed:
            already_processed.append('Idel')
            self.Idel = value
            self.validate_Nml2Quantity_time(self.Idel) # validate type Nml2Quantity_time
        value = find_attr_value_('Idur', node)
        if value is not None and 'Idur' not in already_processed:
            already_processed.append('Idur')
            self.Idur = value
            self.validate_Nml2Quantity_time(self.Idur) # validate type Nml2Quantity_time
        value = find_attr_value_('VT', node)
        if value is not None and 'VT' not in already_processed:
            already_processed.append('VT')
            self.VT = value
            self.validate_Nml2Quantity_voltage(self.VT) # validate type Nml2Quantity_voltage
        value = find_attr_value_('delT', node)
        if value is not None and 'delT' not in already_processed:
            already_processed.append('delT')
            self.delT = value
            self.validate_Nml2Quantity_voltage(self.delT) # validate type Nml2Quantity_voltage
        value = find_attr_value_('a', node)
        if value is not None and 'a' not in already_processed:
            already_processed.append('a')
            self.a = value
            self.validate_Nml2Quantity_conductance(self.a) # validate type Nml2Quantity_conductance
        value = find_attr_value_('thresh', node)
        if value is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            self.thresh = value
            self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage
        value = find_attr_value_('gL', node)
        if value is not None and 'gL' not in already_processed:
            already_processed.append('gL')
            self.gL = value
            self.validate_Nml2Quantity_conductance(self.gL) # validate type Nml2Quantity_conductance
        value = find_attr_value_('Iamp', node)
        if value is not None and 'Iamp' not in already_processed:
            already_processed.append('Iamp')
            self.Iamp = value
            self.validate_Nml2Quantity_current(self.Iamp) # validate type Nml2Quantity_current
        value = find_attr_value_('tauw', node)
        if value is not None and 'tauw' not in already_processed:
            already_processed.append('tauw')
            self.tauw = value
            self.validate_Nml2Quantity_time(self.tauw) # validate type Nml2Quantity_time
        super(AdExIaFCell, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements of its own; defer to the base class.
        super(AdExIaFCell, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class AdExIaFCell
class IzhikevichCell(AbstractCell):
    """Generated binding for the izhikevichCell element.

    All model parameters are held as strings of the Nml2Quantity_* XSD
    types; the ``validate_*`` hooks are no-op stubs in this generated code.
    """
    # generateDS convention: assign a class to `subclass` to make factory()
    # build that class instead of IzhikevichCell.
    subclass = None
    superclass = AbstractCell
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, a=None, Idel=None, c=None, b=None, d=None, Idur=None, v0=None, thresh=None, Iamp=None):
        super(IzhikevichCell, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.a = _cast(None, a)
        self.Idel = _cast(None, Idel)
        self.c = _cast(None, c)
        self.b = _cast(None, b)
        self.d = _cast(None, d)
        self.Idur = _cast(None, Idur)
        self.v0 = _cast(None, v0)
        self.thresh = _cast(None, thresh)
        self.Iamp = _cast(None, Iamp)
        pass
    def factory(*args_, **kwargs_):
        # Instantiate IzhikevichCell, or the registered subclass if set.
        if IzhikevichCell.subclass:
            return IzhikevichCell.subclass(*args_, **kwargs_)
        else:
            return IzhikevichCell(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors; the interleaved validate_* methods are stubs.
    def get_a(self): return self.a
    def set_a(self, a): self.a = a
    def validate_Nml2Quantity_none(self, value):
        # Validate type Nml2Quantity_none, a restriction on xs:string.
        pass
    def get_Idel(self): return self.Idel
    def set_Idel(self, Idel): self.Idel = Idel
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_c(self): return self.c
    def set_c(self, c): self.c = c
    def get_b(self): return self.b
    def set_b(self, b): self.b = b
    def get_d(self): return self.d
    def set_d(self, d): self.d = d
    def get_Idur(self): return self.Idur
    def set_Idur(self, Idur): self.Idur = Idur
    def get_v0(self): return self.v0
    def set_v0(self, v0): self.v0 = v0
    def validate_Nml2Quantity_voltage(self, value):
        # Validate type Nml2Quantity_voltage, a restriction on xs:string.
        pass
    def get_thresh(self): return self.thresh
    def set_thresh(self, thresh): self.thresh = thresh
    def get_Iamp(self): return self.Iamp
    def set_Iamp(self, Iamp): self.Iamp = Iamp
    def export(self, outfile, level, namespace_='', name_='IzhikevichCell', namespacedef_=''):
        # Write this element as XML: self-closing when it has no content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='IzhikevichCell')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='IzhikevichCell'):
        # Base-class attributes first, then each own attribute that is set
        # and not already emitted.
        super(IzhikevichCell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='IzhikevichCell')
        if self.a is not None and 'a' not in already_processed:
            already_processed.append('a')
            outfile.write(' a=%s' % (quote_attrib(self.a), ))
        if self.Idel is not None and 'Idel' not in already_processed:
            already_processed.append('Idel')
            outfile.write(' Idel=%s' % (quote_attrib(self.Idel), ))
        if self.c is not None and 'c' not in already_processed:
            already_processed.append('c')
            outfile.write(' c=%s' % (quote_attrib(self.c), ))
        if self.b is not None and 'b' not in already_processed:
            already_processed.append('b')
            outfile.write(' b=%s' % (quote_attrib(self.b), ))
        if self.d is not None and 'd' not in already_processed:
            already_processed.append('d')
            outfile.write(' d=%s' % (quote_attrib(self.d), ))
        if self.Idur is not None and 'Idur' not in already_processed:
            already_processed.append('Idur')
            outfile.write(' Idur=%s' % (quote_attrib(self.Idur), ))
        if self.v0 is not None and 'v0' not in already_processed:
            already_processed.append('v0')
            outfile.write(' v0=%s' % (quote_attrib(self.v0), ))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            outfile.write(' thresh=%s' % (quote_attrib(self.thresh), ))
        if self.Iamp is not None and 'Iamp' not in already_processed:
            already_processed.append('Iamp')
            outfile.write(' Iamp=%s' % (quote_attrib(self.Iamp), ))
    def exportChildren(self, outfile, level, namespace_='', name_='IzhikevichCell', fromsubclass_=False):
        # No child elements of its own; defer to the base class.
        super(IzhikevichCell, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(IzhikevichCell, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='IzhikevichCell'):
        # Write this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.a is not None and 'a' not in already_processed:
            already_processed.append('a')
            showIndent(outfile, level)
            outfile.write('a = "%s",\n' % (self.a,))
        if self.Idel is not None and 'Idel' not in already_processed:
            already_processed.append('Idel')
            showIndent(outfile, level)
            outfile.write('Idel = "%s",\n' % (self.Idel,))
        if self.c is not None and 'c' not in already_processed:
            already_processed.append('c')
            showIndent(outfile, level)
            outfile.write('c = "%s",\n' % (self.c,))
        if self.b is not None and 'b' not in already_processed:
            already_processed.append('b')
            showIndent(outfile, level)
            outfile.write('b = "%s",\n' % (self.b,))
        if self.d is not None and 'd' not in already_processed:
            already_processed.append('d')
            showIndent(outfile, level)
            outfile.write('d = "%s",\n' % (self.d,))
        if self.Idur is not None and 'Idur' not in already_processed:
            already_processed.append('Idur')
            showIndent(outfile, level)
            outfile.write('Idur = "%s",\n' % (self.Idur,))
        if self.v0 is not None and 'v0' not in already_processed:
            already_processed.append('v0')
            showIndent(outfile, level)
            outfile.write('v0 = "%s",\n' % (self.v0,))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            showIndent(outfile, level)
            outfile.write('thresh = "%s",\n' % (self.thresh,))
        if self.Iamp is not None and 'Iamp' not in already_processed:
            already_processed.append('Iamp')
            showIndent(outfile, level)
            outfile.write('Iamp = "%s",\n' % (self.Iamp,))
        super(IzhikevichCell, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(IzhikevichCell, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate this instance from an ElementTree element.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Read each declared attribute from the node, then run its (stub)
        # type validator, then hand the remainder to the base class.
        value = find_attr_value_('a', node)
        if value is not None and 'a' not in already_processed:
            already_processed.append('a')
            self.a = value
            self.validate_Nml2Quantity_none(self.a) # validate type Nml2Quantity_none
        value = find_attr_value_('Idel', node)
        if value is not None and 'Idel' not in already_processed:
            already_processed.append('Idel')
            self.Idel = value
            self.validate_Nml2Quantity_time(self.Idel) # validate type Nml2Quantity_time
        value = find_attr_value_('c', node)
        if value is not None and 'c' not in already_processed:
            already_processed.append('c')
            self.c = value
            self.validate_Nml2Quantity_none(self.c) # validate type Nml2Quantity_none
        value = find_attr_value_('b', node)
        if value is not None and 'b' not in already_processed:
            already_processed.append('b')
            self.b = value
            self.validate_Nml2Quantity_none(self.b) # validate type Nml2Quantity_none
        value = find_attr_value_('d', node)
        if value is not None and 'd' not in already_processed:
            already_processed.append('d')
            self.d = value
            self.validate_Nml2Quantity_none(self.d) # validate type Nml2Quantity_none
        value = find_attr_value_('Idur', node)
        if value is not None and 'Idur' not in already_processed:
            already_processed.append('Idur')
            self.Idur = value
            self.validate_Nml2Quantity_time(self.Idur) # validate type Nml2Quantity_time
        value = find_attr_value_('v0', node)
        if value is not None and 'v0' not in already_processed:
            already_processed.append('v0')
            self.v0 = value
            self.validate_Nml2Quantity_voltage(self.v0) # validate type Nml2Quantity_voltage
        value = find_attr_value_('thresh', node)
        if value is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            self.thresh = value
            self.validate_Nml2Quantity_voltage(self.thresh) # validate type Nml2Quantity_voltage
        value = find_attr_value_('Iamp', node)
        if value is not None and 'Iamp' not in already_processed:
            already_processed.append('Iamp')
            self.Iamp = value
            self.validate_Nml2Quantity_none(self.Iamp) # validate type Nml2Quantity_none
        super(IzhikevichCell, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements of its own; defer to the base class.
        super(IzhikevichCell, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class IzhikevichCell
class IaFCell(AbstractCell):
subclass = None
superclass = AbstractCell
def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, reset=None, C=None, thresh=None, leakConductance=None, leakReversal=None):
super(IaFCell, self).__init__(id, neuroLexId, metaid, notes, annotation, )
self.reset = _cast(None, reset)
self.C = _cast(None, C)
self.thresh = _cast(None, thresh)
self.leakConductance = _cast(None, leakConductance)
self.leakReversal = _cast(None, leakReversal)
pass
def factory(*args_, **kwargs_):
if IaFCell.subclass:
return IaFCell.subclass(*args_, **kwargs_)
else:
return IaFCell(*args_, **kwargs_)
factory = staticmethod(factory)
def get_reset(self): return self.reset
def set_reset(self, reset): self.reset = reset
def validate_Nml2Quantity_voltage(self, value):
# Validate type Nml2Quantity_voltage, a restriction on xs:string.
pass
def get_C(self): return self.C
def set_C(self, C): self.C = C
def validate_Nml2Quantity_capacitance(self, value):
# Validate type Nml2Quantity_capacitance, a restriction on xs:string.
pass
def get_thresh(self): return self.thresh
def set_thresh(self, thresh): self.thresh = thresh
def get_leakConductance(self): return self.leakConductance
def set_leakConductance(self, leakConductance): self.leakConductance = leakConductance
def validate_Nml2Quantity_conductance(self, value):
# Validate type Nml2Quantity_conductance, a restriction on xs:string.
pass
def get_leakReversal(self): return self.leakReversal
def set_leakReversal(self, leakReversal): self.leakReversal = leakReversal
def export(self, outfile, level, namespace_='', name_='IaFCell', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='IaFCell')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='IaFCell'):
super(IaFCell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='IaFCell')
if self.reset is not None and 'reset' not in already_processed:
already_processed.append('reset')
outfile.write(' reset=%s' % (quote_attrib(self.reset), ))
if self.C is not None and 'C' not in already_processed:
already_processed.append('C')
outfile.write(' C=%s' % (quote_attrib(self.C), ))
if self.thresh is not None and 'thresh' not in already_processed:
already_processed.append('thresh')
outfile.write(' thresh=%s' % (quote_attrib(self.thresh), ))
if self.leakConductance is not None and 'leakConductance' not in already_processed:
already_processed.append('leakConductance')
outfile.write(' leakConductance=%s' % (quote_attrib(self.leakConductance), ))
if self.leakReversal is not None and 'leakReversal' not in already_processed:
already_processed.append('leakReversal')
outfile.write(' leakReversal=%s' % (quote_attrib(self.leakReversal), ))
def exportChildren(self, outfile, level, namespace_='', name_='IaFCell', fromsubclass_=False):
super(IaFCell, self).exportChildren(outfile, level, namespace_, name_, True)
def hasContent_(self):
if (
super(IaFCell, self).hasContent_()
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='IaFCell'):
        """Write this element as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write each set attribute as a ``name = "value",`` keyword line."""
        if self.reset is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            showIndent(outfile, level)
            outfile.write('reset = "%s",\n' % (self.reset,))
        if self.C is not None and 'C' not in already_processed:
            already_processed.append('C')
            showIndent(outfile, level)
            outfile.write('C = "%s",\n' % (self.C,))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            showIndent(outfile, level)
            outfile.write('thresh = "%s",\n' % (self.thresh,))
        if self.leakConductance is not None and 'leakConductance' not in already_processed:
            already_processed.append('leakConductance')
            showIndent(outfile, level)
            outfile.write('leakConductance = "%s",\n' % (self.leakConductance,))
        if self.leakReversal is not None and 'leakReversal' not in already_processed:
            already_processed.append('leakReversal')
            showIndent(outfile, level)
            outfile.write('leakReversal = "%s",\n' % (self.leakReversal,))
        # Superclass attributes come last, filtered through already_processed.
        super(IaFCell, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        # No children of our own; superclass handles any inherited ones.
        super(IaFCell, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            # Strip any XML namespace prefix from the child tag name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Read this class's XML attributes from *node*, then defer to super.

        Each value is stored as the raw attribute string; the validate_*
        hooks are called but are no-op stubs in this generated code.
        """
        value = find_attr_value_('reset', node)
        if value is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            self.reset = value
            self.validate_Nml2Quantity_voltage(self.reset)    # validate type Nml2Quantity_voltage
        value = find_attr_value_('C', node)
        if value is not None and 'C' not in already_processed:
            already_processed.append('C')
            self.C = value
            self.validate_Nml2Quantity_capacitance(self.C)    # validate type Nml2Quantity_capacitance
        value = find_attr_value_('thresh', node)
        if value is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            self.thresh = value
            self.validate_Nml2Quantity_voltage(self.thresh)    # validate type Nml2Quantity_voltage
        value = find_attr_value_('leakConductance', node)
        if value is not None and 'leakConductance' not in already_processed:
            already_processed.append('leakConductance')
            self.leakConductance = value
            self.validate_Nml2Quantity_conductance(self.leakConductance)    # validate type Nml2Quantity_conductance
        value = find_attr_value_('leakReversal', node)
        if value is not None and 'leakReversal' not in already_processed:
            already_processed.append('leakReversal')
            self.leakReversal = value
            self.validate_Nml2Quantity_voltage(self.leakReversal)    # validate type Nml2Quantity_voltage
        super(IaFCell, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No own child elements; delegate to the superclass.
        super(IaFCell, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class IaFCell
class IaFTauCell(AbstractCell):
    """Generated binding for the NeuroML2 <iafTauCell> element.

    Integrate-and-fire cell parameterized by a membrane time constant
    (``tau``) instead of explicit capacitance/conductance.  The
    attributes ``reset``, ``tau``, ``thresh`` and ``leakReversal`` are
    kept as raw Nml2Quantity_* strings as read from the XML.

    Auto-generated (generateDS.py style) -- keep the export/build method
    conventions intact when editing.
    """
    # Optional replacement class used by factory(); None means "use this class".
    subclass = None
    superclass = AbstractCell
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, reset=None, tau=None, thresh=None, leakReversal=None):
        super(IaFTauCell, self).__init__(id, neuroLexId, metaid, notes, annotation, )
        self.reset = _cast(None, reset)
        self.tau = _cast(None, tau)
        self.thresh = _cast(None, thresh)
        self.leakReversal = _cast(None, leakReversal)
        pass
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass if one was installed.
        if IaFTauCell.subclass:
            return IaFTauCell.subclass(*args_, **kwargs_)
        else:
            return IaFTauCell(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- attribute accessors (values are raw quantity strings) ---
    def get_reset(self): return self.reset
    def set_reset(self, reset): self.reset = reset
    def validate_Nml2Quantity_voltage(self, value):
        # Validate type Nml2Quantity_voltage, a restriction on xs:string.
        pass
    def get_tau(self): return self.tau
    def set_tau(self, tau): self.tau = tau
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_thresh(self): return self.thresh
    def set_thresh(self, thresh): self.thresh = thresh
    def get_leakReversal(self): return self.leakReversal
    def set_leakReversal(self, leakReversal): self.leakReversal = leakReversal
    def export(self, outfile, level, namespace_='', name_='IaFTauCell', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='IaFTauCell')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            # No child content: self-closing tag.
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='IaFTauCell'):
        # already_processed is shared up the superclass chain so each
        # attribute is written at most once.
        super(IaFTauCell, self).exportAttributes(outfile, level, already_processed, namespace_, name_='IaFTauCell')
        if self.reset is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            outfile.write(' reset=%s' % (quote_attrib(self.reset), ))
        if self.tau is not None and 'tau' not in already_processed:
            already_processed.append('tau')
            outfile.write(' tau=%s' % (quote_attrib(self.tau), ))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            outfile.write(' thresh=%s' % (quote_attrib(self.thresh), ))
        if self.leakReversal is not None and 'leakReversal' not in already_processed:
            already_processed.append('leakReversal')
            outfile.write(' leakReversal=%s' % (quote_attrib(self.leakReversal), ))
    def exportChildren(self, outfile, level, namespace_='', name_='IaFTauCell', fromsubclass_=False):
        # No own children; defer to superclass.
        super(IaFTauCell, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(IaFTauCell, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='IaFTauCell'):
        """Write this element as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.reset is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            showIndent(outfile, level)
            outfile.write('reset = "%s",\n' % (self.reset,))
        if self.tau is not None and 'tau' not in already_processed:
            already_processed.append('tau')
            showIndent(outfile, level)
            outfile.write('tau = "%s",\n' % (self.tau,))
        if self.thresh is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            showIndent(outfile, level)
            outfile.write('thresh = "%s",\n' % (self.thresh,))
        if self.leakReversal is not None and 'leakReversal' not in already_processed:
            already_processed.append('leakReversal')
            showIndent(outfile, level)
            outfile.write('leakReversal = "%s",\n' % (self.leakReversal,))
        super(IaFTauCell, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(IaFTauCell, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            # Strip any XML namespace prefix from the child tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('reset', node)
        if value is not None and 'reset' not in already_processed:
            already_processed.append('reset')
            self.reset = value
            self.validate_Nml2Quantity_voltage(self.reset)    # validate type Nml2Quantity_voltage
        value = find_attr_value_('tau', node)
        if value is not None and 'tau' not in already_processed:
            already_processed.append('tau')
            self.tau = value
            self.validate_Nml2Quantity_time(self.tau)    # validate type Nml2Quantity_time
        value = find_attr_value_('thresh', node)
        if value is not None and 'thresh' not in already_processed:
            already_processed.append('thresh')
            self.thresh = value
            self.validate_Nml2Quantity_voltage(self.thresh)    # validate type Nml2Quantity_voltage
        value = find_attr_value_('leakReversal', node)
        if value is not None and 'leakReversal' not in already_processed:
            already_processed.append('leakReversal')
            self.leakReversal = value
            self.validate_Nml2Quantity_voltage(self.leakReversal)    # validate type Nml2Quantity_voltage
        super(IaFTauCell, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No own child elements; delegate to the superclass.
        super(IaFTauCell, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class IaFTauCell
class ExpTwoSynapse(ConductanceBasedSynapse):
    """Generated binding for the NeuroML2 <expTwoSynapse> element.

    Conductance-based synapse with separate rise and decay time
    constants (``tauRise``, ``tauDecay``), both raw Nml2Quantity_time
    strings.  Serves as a base for StpSynapse and NmdaSynapse; the
    ``extensiontype_`` attribute carries the ``xsi:type`` marker when an
    extension subtype is serialized.

    Auto-generated (generateDS.py style).
    """
    subclass = None
    superclass = ConductanceBasedSynapse
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, erev=None, gbase=None, tauDecay=None, tauRise=None, extensiontype_=None):
        super(ExpTwoSynapse, self).__init__(id, neuroLexId, metaid, notes, annotation, erev, gbase, extensiontype_, )
        self.tauDecay = _cast(None, tauDecay)
        self.tauRise = _cast(None, tauRise)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass if one was installed.
        if ExpTwoSynapse.subclass:
            return ExpTwoSynapse.subclass(*args_, **kwargs_)
        else:
            return ExpTwoSynapse(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- attribute accessors ---
    def get_tauDecay(self): return self.tauDecay
    def set_tauDecay(self, tauDecay): self.tauDecay = tauDecay
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def get_tauRise(self): return self.tauRise
    def set_tauRise(self, tauRise): self.tauRise = tauRise
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='', name_='ExpTwoSynapse', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ExpTwoSynapse')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ExpTwoSynapse'):
        super(ExpTwoSynapse, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ExpTwoSynapse')
        if self.tauDecay is not None and 'tauDecay' not in already_processed:
            already_processed.append('tauDecay')
            outfile.write(' tauDecay=%s' % (quote_attrib(self.tauDecay), ))
        if self.tauRise is not None and 'tauRise' not in already_processed:
            already_processed.append('tauRise')
            outfile.write(' tauRise=%s' % (quote_attrib(self.tauRise), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            # Emit the xsi namespace and concrete type marker for extensions.
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='', name_='ExpTwoSynapse', fromsubclass_=False):
        # No own children; defer to superclass.
        super(ExpTwoSynapse, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(ExpTwoSynapse, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ExpTwoSynapse'):
        """Write this element as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.tauDecay is not None and 'tauDecay' not in already_processed:
            already_processed.append('tauDecay')
            showIndent(outfile, level)
            outfile.write('tauDecay = "%s",\n' % (self.tauDecay,))
        if self.tauRise is not None and 'tauRise' not in already_processed:
            already_processed.append('tauRise')
            showIndent(outfile, level)
            outfile.write('tauRise = "%s",\n' % (self.tauRise,))
        super(ExpTwoSynapse, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ExpTwoSynapse, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('tauDecay', node)
        if value is not None and 'tauDecay' not in already_processed:
            already_processed.append('tauDecay')
            self.tauDecay = value
            self.validate_Nml2Quantity_time(self.tauDecay)    # validate type Nml2Quantity_time
        value = find_attr_value_('tauRise', node)
        if value is not None and 'tauRise' not in already_processed:
            already_processed.append('tauRise')
            self.tauRise = value
            self.validate_Nml2Quantity_time(self.tauRise)    # validate type Nml2Quantity_time
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
        super(ExpTwoSynapse, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No own child elements; delegate to the superclass.
        super(ExpTwoSynapse, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class ExpTwoSynapse
class ExpOneSynapse(ConductanceBasedSynapse):
    """Generated binding for the NeuroML2 <expOneSynapse> element.

    Conductance-based synapse with a single decay time constant
    (``tauDecay``), stored as a raw Nml2Quantity_time string.

    Auto-generated (generateDS.py style).
    """
    subclass = None
    superclass = ConductanceBasedSynapse
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, erev=None, gbase=None, tauDecay=None):
        super(ExpOneSynapse, self).__init__(id, neuroLexId, metaid, notes, annotation, erev, gbase, )
        self.tauDecay = _cast(None, tauDecay)
        pass
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass if one was installed.
        if ExpOneSynapse.subclass:
            return ExpOneSynapse.subclass(*args_, **kwargs_)
        else:
            return ExpOneSynapse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_tauDecay(self): return self.tauDecay
    def set_tauDecay(self, tauDecay): self.tauDecay = tauDecay
    def validate_Nml2Quantity_time(self, value):
        # Validate type Nml2Quantity_time, a restriction on xs:string.
        pass
    def export(self, outfile, level, namespace_='', name_='ExpOneSynapse', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ExpOneSynapse')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ExpOneSynapse'):
        super(ExpOneSynapse, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ExpOneSynapse')
        if self.tauDecay is not None and 'tauDecay' not in already_processed:
            already_processed.append('tauDecay')
            outfile.write(' tauDecay=%s' % (quote_attrib(self.tauDecay), ))
    def exportChildren(self, outfile, level, namespace_='', name_='ExpOneSynapse', fromsubclass_=False):
        # No own children; defer to superclass.
        super(ExpOneSynapse, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(ExpOneSynapse, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ExpOneSynapse'):
        """Write this element as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.tauDecay is not None and 'tauDecay' not in already_processed:
            already_processed.append('tauDecay')
            showIndent(outfile, level)
            outfile.write('tauDecay = "%s",\n' % (self.tauDecay,))
        super(ExpOneSynapse, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ExpOneSynapse, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('tauDecay', node)
        if value is not None and 'tauDecay' not in already_processed:
            already_processed.append('tauDecay')
            self.tauDecay = value
            self.validate_Nml2Quantity_time(self.tauDecay)    # validate type Nml2Quantity_time
        super(ExpOneSynapse, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No own child elements; delegate to the superclass.
        super(ExpOneSynapse, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class ExpOneSynapse
class StpSynapse(ExpTwoSynapse):
    """Generated binding for a two-exponential synapse with short-term
    plasticity.

    Extends ExpTwoSynapse with a single ``stpMechanism`` child element
    (built via StpMechanism.factory() during parsing).

    Auto-generated (generateDS.py style).
    """
    subclass = None
    superclass = ExpTwoSynapse
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, erev=None, gbase=None, tauDecay=None, tauRise=None, stpMechanism=None):
        super(StpSynapse, self).__init__(id, neuroLexId, metaid, notes, annotation, erev, gbase, tauDecay, tauRise, )
        self.stpMechanism = stpMechanism
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass if one was installed.
        if StpSynapse.subclass:
            return StpSynapse.subclass(*args_, **kwargs_)
        else:
            return StpSynapse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_stpMechanism(self): return self.stpMechanism
    def set_stpMechanism(self, stpMechanism): self.stpMechanism = stpMechanism
    def export(self, outfile, level, namespace_='', name_='StpSynapse', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='StpSynapse')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='StpSynapse'):
        # All attributes are inherited; nothing of our own to write.
        super(StpSynapse, self).exportAttributes(outfile, level, already_processed, namespace_, name_='StpSynapse')
    def exportChildren(self, outfile, level, namespace_='', name_='StpSynapse', fromsubclass_=False):
        super(StpSynapse, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.stpMechanism is not None:
            self.stpMechanism.export(outfile, level, namespace_, name_='stpMechanism', )
    def hasContent_(self):
        # Content exists if we hold an stpMechanism or the superclass has any.
        if (
            self.stpMechanism is not None or
            super(StpSynapse, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='StpSynapse'):
        """Write this element as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(StpSynapse, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(StpSynapse, self).exportLiteralChildren(outfile, level, name_)
        if self.stpMechanism is not None:
            showIndent(outfile, level)
            outfile.write('stpMechanism=model_.StpMechanism(\n')
            self.stpMechanism.exportLiteral(outfile, level, name_='stpMechanism')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(StpSynapse, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'stpMechanism':
            # Build the child element via its own factory/build machinery.
            obj_ = StpMechanism.factory()
            obj_.build(child_)
            self.set_stpMechanism(obj_)
        super(StpSynapse, self).buildChildren(child_, node, nodeName_, True)
# end class StpSynapse
class NmdaSynapse(ExpTwoSynapse):
    """Generated binding for a two-exponential NMDA-receptor synapse.

    Extends ExpTwoSynapse with a single ``voltageConcDepBlock`` child
    element (built via VoltageConcDepBlock.factory() during parsing).

    Auto-generated (generateDS.py style).
    """
    subclass = None
    superclass = ExpTwoSynapse
    def __init__(self, id=None, neuroLexId=None, metaid=None, notes=None, annotation=None, erev=None, gbase=None, tauDecay=None, tauRise=None, voltageConcDepBlock=None):
        super(NmdaSynapse, self).__init__(id, neuroLexId, metaid, notes, annotation, erev, gbase, tauDecay, tauRise, )
        self.voltageConcDepBlock = voltageConcDepBlock
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass if one was installed.
        if NmdaSynapse.subclass:
            return NmdaSynapse.subclass(*args_, **kwargs_)
        else:
            return NmdaSynapse(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_voltageConcDepBlock(self): return self.voltageConcDepBlock
    def set_voltageConcDepBlock(self, voltageConcDepBlock): self.voltageConcDepBlock = voltageConcDepBlock
    def export(self, outfile, level, namespace_='', name_='NmdaSynapse', namespacedef_=''):
        """Serialize this element as XML to *outfile* at indent *level*."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='NmdaSynapse')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='NmdaSynapse'):
        # All attributes are inherited; nothing of our own to write.
        super(NmdaSynapse, self).exportAttributes(outfile, level, already_processed, namespace_, name_='NmdaSynapse')
    def exportChildren(self, outfile, level, namespace_='', name_='NmdaSynapse', fromsubclass_=False):
        super(NmdaSynapse, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.voltageConcDepBlock is not None:
            self.voltageConcDepBlock.export(outfile, level, namespace_, name_='voltageConcDepBlock', )
    def hasContent_(self):
        # Content exists if we hold a voltageConcDepBlock or the superclass has any.
        if (
            self.voltageConcDepBlock is not None or
            super(NmdaSynapse, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='NmdaSynapse'):
        """Write this element as a Python-literal constructor expression."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(NmdaSynapse, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(NmdaSynapse, self).exportLiteralChildren(outfile, level, name_)
        if self.voltageConcDepBlock is not None:
            showIndent(outfile, level)
            outfile.write('voltageConcDepBlock=model_.VoltageConcDepBlock(\n')
            self.voltageConcDepBlock.exportLiteral(outfile, level, name_='voltageConcDepBlock')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree *node*."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(NmdaSynapse, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'voltageConcDepBlock':
            # Build the child element via its own factory/build machinery.
            obj_ = VoltageConcDepBlock.factory()
            obj_.build(child_)
            self.set_voltageConcDepBlock(obj_)
        super(NmdaSynapse, self).buildChildren(child_, node, nodeName_, True)
# end class NmdaSynapse
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""

def usage():
    """Print the usage message and exit with status 1."""
    # sys.stdout.write works under both Python 2 and Python 3, unlike the
    # original Python-2-only ``print USAGE_TEXT`` statement; the trailing
    # newline reproduces what ``print`` emitted.
    sys.stdout.write(USAGE_TEXT)
    sys.stdout.write('\n')
    sys.exit(1)
def get_root_tag(node):
    """Return ``(tag, cls)`` for *node*: its local tag name and the
    module-level class of that name, or None if no such class exists."""
    local_name = Tag_pattern_.match(node.tag).groups()[-1]
    return local_name, globals().get(local_name)
def parse(inFileName):
    """Parse the XML file *inFileName* and return the built root object.

    If the document's root tag does not map to a class in this module,
    the root is assumed to be a ``neuroml`` element.
    """
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'neuroml'
        rootClass = neuroml
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    ## sys.stdout.write('<?xml version="1.0" ?>\n')
    ## rootObj.export(sys.stdout, 0, name_=rootTag,
    ##     namespacedef_='xmlns:nml2="http://www.neuroml.org/schema/neuroml2"')
    return rootObj
def parseString(inString):
    """Parse XML from the string *inString* and return the built root object.

    If the document's root tag does not map to a class in this module,
    the root is assumed to be a ``neuroml`` element.
    """
    # The StringIO module is Python-2-only; fall back to io.StringIO so
    # this helper also works on Python 3.
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    doc = parsexml_(StringIO(inString))
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'neuroml'
        rootClass = neuroml
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    ## sys.stdout.write('<?xml version="1.0" ?>\n')
    ## rootObj.export(sys.stdout, 0, name_="neuroml",
    ##     namespacedef_='xmlns:nml2="http://www.neuroml.org/schema/neuroml2"')
    return rootObj
def parseLiteral(inFileName):
    """Parse the XML file *inFileName* and return the built root object.

    Identical to parse(); the commented-out lines show how to dump the
    result as a Python-literal constructor expression instead of XML.
    """
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'neuroml'
        rootClass = neuroml
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    ## sys.stdout.write('#from nml2 import *\n\n')
    ## sys.stdout.write('import nml2 as model_\n\n')
    ## sys.stdout.write('rootObj = model_.rootTag(\n')
    ## rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
    ## sys.stdout.write(')\n')
    return rootObj
def main():
    """Command-line entry point: parse the single XML file argument,
    or print usage and exit when the argument count is wrong."""
    arguments = sys.argv[1:]
    if len(arguments) != 1:
        usage()
    else:
        parse(arguments[0])
# Allow this generated module to be run directly as a script.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Explicit public API of this generated module: one entry per generated
# binding class (plus the lowercase 'neuroml' root element class).
__all__ = [
    "AbstractCell",
    "AdExIaFCell",
    "Annotation",
    "Base",
    "BiophysicalProperties",
    "Cell",
    "CellSet",
    "ChannelDensity",
    "ChannelPopulation",
    "ConcentrationModel_D",
    "ConductanceBasedSynapse",
    "Connection",
    "DecayingPoolConcentrationModel",
    "DistalDetails",
    "ExpOneSynapse",
    "ExpTwoSynapse",
    "ExplicitInput",
    "ExtracellularProperties",
    "FixedConcentration",
    "Gate",
    "GridLayout",
    "IaFCell",
    "IaFTauCell",
    "Include",
    "InhomogeneousParam",
    "InhomogeneousValue",
    "Instance",
    "Instances",
    "IntracellularProperties",
    "IonChannel",
    "IzhikevichCell",
    "Layout",
    "Location",
    "Member",
    "MembraneProperties",
    "Morphology",
    "Network",
    "NmdaSynapse",
    "Path",
    "Point3DWithDiam",
    "Population",
    "Projection",
    "ProximalDetails",
    "PulseGenerator",
    "RandomLayout",
    "ReactionScheme",
    "Region",
    "ReversalPotential",
    "Segment",
    "SegmentEndPoint",
    "SegmentGroup",
    "SegmentParent",
    "Space",
    "SpaceStructure",
    "Species",
    "Standalone",
    "StpMechanism",
    "StpSynapse",
    "SubTree",
    "SynapticConnection",
    "UnstructuredLayout",
    "ValueAcrossSegOrSegGroup",
    "VariableParameter",
    "VoltageConcDepBlock",
    "includeType",
    "neuroml"
    ]
| 48.830236
| 437
| 0.646051
| 43,061
| 407,293
| 5.920229
| 0.012982
| 0.069383
| 0.018746
| 0.057902
| 0.79163
| 0.741812
| 0.704653
| 0.676595
| 0.648756
| 0.635011
| 0
| 0.002191
| 0.245838
| 407,293
| 8,340
| 438
| 48.836091
| 0.827757
| 0.023548
| 0
| 0.667423
| 1
| 0
| 0.067645
| 0.010489
| 0
| 0
| 0
| 0.00024
| 0
| 0
| null | null | 0.02318
| 0.004157
| null | null | 0.000756
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3fcfb323c03c06e3c40cc4f32b4eaed34cc9281d
| 6,104
|
py
|
Python
|
project/editorial/migrations/0009_auto_20160319_2213.py
|
cojennin/facet
|
230e65316134b3399a35d40034728e61ba63cb2a
|
[
"MIT"
] | 25
|
2015-07-13T22:16:36.000Z
|
2021-11-11T02:45:32.000Z
|
project/editorial/migrations/0009_auto_20160319_2213.py
|
cojennin/facet
|
230e65316134b3399a35d40034728e61ba63cb2a
|
[
"MIT"
] | 74
|
2015-12-01T18:57:47.000Z
|
2022-03-11T23:25:47.000Z
|
project/editorial/migrations/0009_auto_20160319_2213.py
|
cojennin/facet
|
230e65316134b3399a35d40034728e61ba63cb2a
|
[
"MIT"
] | 6
|
2016-01-08T21:12:43.000Z
|
2019-05-20T16:07:56.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.contrib.postgres.fields
from django.conf import settings
class Migration(migrations.Migration):
    """Add AudioAsset and VideoAsset models and wire them to the facet models.

    Also widens the choice lists on DocumentAsset.doc_type and
    ImageAsset.image_type.  Auto-generated; do not hand-edit applied
    migrations.
    """

    dependencies = [
        ('editorial', '0008_auto_20160319_2059'),
    ]

    operations = [
        # New model for audio files (uploaded mp3/wav or linked on SoundCloud).
        migrations.CreateModel(
            name='AudioAsset',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('original', models.BooleanField(default=True, help_text=b'This content originally belonged to this organization.')),
                ('asset_title', models.CharField(help_text=b'Text for file name. Name it intuitively.', max_length=200, blank=True)),
                ('asset_description', models.TextField(help_text=b'What is the asset.', max_length=300, blank=True)),
                ('attribution', models.TextField(help_text=b'The appropriate information for crediting the asset.', max_length=200, blank=True)),
                ('audio', models.FileField(upload_to=b'audio', blank=True)),
                ('link', models.TextField(help_text=b'Link to audio file on SoundCloud', max_length=400, blank=True)),
                ('audio_type', models.CharField(help_text=b'The kind of audio.', max_length=20, choices=[(b'MP3', b'mp3'), (b'WAV', b'wav'), (b'SC', b'SoundCloud')])),
                ('creation_date', models.DateTimeField(help_text=b'When the asset was created.', auto_now_add=True)),
                ('keywords', django.contrib.postgres.fields.ArrayField(default=list, help_text=b'List of keywords for search.', size=None, base_field=models.CharField(max_length=100), blank=True)),
                ('organization', models.ForeignKey(related_name='audio_asset_organization', to='editorial.Organization')),
                ('owner', models.ForeignKey(related_name='audio_asset_owner', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Audio Asset',
                'verbose_name_plural': 'Audio Assets',
            },
        ),
        # New model for video files (uploaded mp4 or linked on YouTube/Vimeo);
        # mirrors the AudioAsset field layout.
        migrations.CreateModel(
            name='VideoAsset',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('original', models.BooleanField(default=True, help_text=b'This content originally belonged to this organization.')),
                ('asset_title', models.CharField(help_text=b'Text for file name. Name it intuitively.', max_length=200, blank=True)),
                ('asset_description', models.TextField(help_text=b'What is the asset.', max_length=300, blank=True)),
                ('attribution', models.TextField(help_text=b'The appropriate information for crediting the asset.', max_length=200, blank=True)),
                ('video', models.FileField(upload_to=b'videos', blank=True)),
                ('link', models.TextField(help_text=b'Link to video file on YouTube or Vimeo', max_length=400, blank=True)),
                ('video_type', models.CharField(help_text=b'The kind of video.', max_length=20, choices=[(b'MP4', b'mp4'), (b'YOUTUBE', b'YouTube'), (b'VIMEO', b'Vimeo')])),
                ('creation_date', models.DateTimeField(help_text=b'When the asset was created.', auto_now_add=True)),
                ('keywords', django.contrib.postgres.fields.ArrayField(default=list, help_text=b'List of keywords for search.', size=None, base_field=models.CharField(max_length=100), blank=True)),
                ('organization', models.ForeignKey(related_name='video_asset_organization', to='editorial.Organization')),
                ('owner', models.ForeignKey(related_name='video_asset_owner', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Video Asset',
                'verbose_name_plural': 'Video Assets',
            },
        ),
        # Expanded document-type choices.
        migrations.AlterField(
            model_name='documentasset',
            name='doc_type',
            field=models.CharField(help_text=b'The kind of document.', max_length=20, choices=[(b'PDF', b'Adobe PDF'), (b'WORD DOC', b'Word Doc'), (b'TEXT', b'Text File'), (b'COMMA SEPARATED', b'Comma Separated'), (b'EXCEL', b'Excel File'), (b'OTHER', b'Other')]),
        ),
        # Expanded image-type choices.
        migrations.AlterField(
            model_name='imageasset',
            name='image_type',
            field=models.CharField(help_text=b'The kind of image.', max_length=20, choices=[(b'PIC', b'Photograph'), (b'GRAPH', b'Graphic')]),
        ),
        # Attach the new asset models to every facet type via M2M links.
        migrations.AddField(
            model_name='audiofacet',
            name='audio_assets',
            field=models.ManyToManyField(to='editorial.AudioAsset', blank=True),
        ),
        migrations.AddField(
            model_name='audiofacet',
            name='video_assets',
            field=models.ManyToManyField(to='editorial.VideoAsset', blank=True),
        ),
        migrations.AddField(
            model_name='printfacet',
            name='audio_assets',
            field=models.ManyToManyField(to='editorial.AudioAsset', blank=True),
        ),
        migrations.AddField(
            model_name='printfacet',
            name='video_assets',
            field=models.ManyToManyField(to='editorial.VideoAsset', blank=True),
        ),
        migrations.AddField(
            model_name='videofacet',
            name='audio_assets',
            field=models.ManyToManyField(to='editorial.AudioAsset', blank=True),
        ),
        migrations.AddField(
            model_name='videofacet',
            name='video_assets',
            field=models.ManyToManyField(to='editorial.VideoAsset', blank=True),
        ),
        migrations.AddField(
            model_name='webfacet',
            name='audio_assets',
            field=models.ManyToManyField(to='editorial.AudioAsset', blank=True),
        ),
        migrations.AddField(
            model_name='webfacet',
            name='video_assets',
            field=models.ManyToManyField(to='editorial.VideoAsset', blank=True),
        ),
    ]
| 56
| 264
| 0.618775
| 682
| 6,104
| 5.385631
| 0.206745
| 0.049006
| 0.044106
| 0.058808
| 0.794446
| 0.749251
| 0.73101
| 0.73101
| 0.715764
| 0.695617
| 0
| 0.012743
| 0.241481
| 6,104
| 108
| 265
| 56.518519
| 0.780562
| 0.00344
| 0
| 0.666667
| 0
| 0
| 0.27035
| 0.018911
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.039216
| 0
| 0.068627
| 0.019608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3fcfde19e2b64aa43010d4b643d216399f41c72b
| 210
|
py
|
Python
|
online_library/book/models.py
|
svaradinov/online_library
|
183f841dc27e899136a437b37221e1385abe0c33
|
[
"MIT"
] | null | null | null |
online_library/book/models.py
|
svaradinov/online_library
|
183f841dc27e899136a437b37221e1385abe0c33
|
[
"MIT"
] | null | null | null |
online_library/book/models.py
|
svaradinov/online_library
|
183f841dc27e899136a437b37221e1385abe0c33
|
[
"MIT"
] | null | null | null |
from django.db import models
class Book(models.Model):
    """A book entry in the online library catalogue."""

    title = models.CharField(max_length=30)  # display title, <= 30 chars
    description = models.TextField()  # free-form summary text
    image = models.URLField()  # URL of the cover image (not an uploaded file)
    # NOTE(review): shadows the builtin ``type``; presumably a genre/category
    # label — confirm intended values with callers.
    type = models.CharField(max_length=30)
| 26.25
| 43
| 0.72381
| 27
| 210
| 5.555556
| 0.666667
| 0.2
| 0.24
| 0.32
| 0.346667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022857
| 0.166667
| 210
| 7
| 44
| 30
| 0.834286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
3fd571a65f05655a35a424900fcaf64e68d459b6
| 15,867
|
py
|
Python
|
Exploration-API/api.py
|
SDM-TIB/Knowledge4COVID-19
|
ef2ad3a103db080923621f265497054a957413d1
|
[
"Apache-2.0"
] | null | null | null |
Exploration-API/api.py
|
SDM-TIB/Knowledge4COVID-19
|
ef2ad3a103db080923621f265497054a957413d1
|
[
"Apache-2.0"
] | null | null | null |
Exploration-API/api.py
|
SDM-TIB/Knowledge4COVID-19
|
ef2ad3a103db080923621f265497054a957413d1
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
#
# Description: POST service for exploration of
# data of Lung Cancer in the iASiS KG.
#
import sys
from flask import Flask, abort, request, make_response
import json
from SPARQLWrapper import SPARQLWrapper, JSON
import logging
import os
import itertools
import get_publication
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# SPARQL endpoint URL of the knowledge graph; the service fails fast at
# import time if ENDPOINT is not set in the environment.
KG = os.environ["ENDPOINT"]
EMPTY_JSON = "{}"
app = Flask(__name__)
############################
#
# Query constants
#
############################
# Known drug-drug interactions affecting one drug; callers append a FILTER
# on ?affectdDrugCUI plus the closing brace before executing.
QUERY_DRUG_TO_DRUGS_INTERACTIONS ="""
SELECT DISTINCT ?effectorDrugLabel ?affectdDrugLabel ?effect AS ?effectLabel ?impactLabel WHERE {
?interaction <http://covid-19.tib.eu/vocab/precipitant_hasDrugBankID> ?effectorDrug.
?interaction <http://covid-19.tib.eu/vocab/object_hasDrugBankID> ?affectdDrug.
?affectdDrug <http://covid-19.tib.eu/vocab/hasCUIAnnotation> ?affectdDrugCUI.
?effectorDrug <http://covid-19.tib.eu/vocab/drugLabel> ?effectorDrugLabel.
?affectdDrug <http://covid-19.tib.eu/vocab/drugLabel> ?affectdDrugLabel.
?interaction <http://covid-19.tib.eu/vocab/effect> ?effectCUI.
?effect <http://covid-19.tib.eu/vocab/hasCUIAnnotation> ?effectCUI.
?effect a <http://covid-19.tib.eu/vocab/AdverseEvent>.
?interaction <http://covid-19.tib.eu/vocab/impact> ?impactLabel.
"""
# Predicted (machine-learned) interactions affecting one drug; same
# open-brace convention as above.
QUERY_DRUG_TO_DRUGS_INTERACTIONS_PREDICTED ="""
SELECT DISTINCT ?effectorDrugLabel ?affectdDrugLabel ?confidence ?provenance WHERE {
?interaction a <http://covid-19.tib.eu/vocab/DrugDrugPrediction>.
?interaction <http://covid-19.tib.eu/vocab/interactor1> ?effectorDrug.
?interaction <http://covid-19.tib.eu/vocab/interactor2> ?affectdDrug.
?affectdDrug <http://covid-19.tib.eu/vocab/hasCUIAnnotation> ?affectdDrugCUI.
?effectorDrug <http://covid-19.tib.eu/vocab/drugLabel> ?effectorDrugLabel.
?affectdDrug <http://covid-19.tib.eu/vocab/drugLabel> ?affectdDrugLabel.
?interaction <http://covid-19.tib.eu/vocab/confidence> ?confidence.
?interaction <http://covid-19.tib.eu/vocab/predictionMethod> ?provenance.
"""
# Known interactions between a pair of drugs; str.format template where {0}
# and {1} are the two CUIs.  The UNION covers both precipitant/object
# orderings of the pair.
QUERY_DRUGS_TO_DRUGS_INTERACTIONS ="""
SELECT * {{
{{SELECT DISTINCT ?effectorDrugLabel ?affectdDrugLabel ?effect AS ?effectLabel ?impactLabel WHERE {{
?interaction <http://covid-19.tib.eu/vocab/precipitantDrug> <http://covid-19.tib.eu/Annotation/{0}>.
?interaction <http://covid-19.tib.eu/vocab/objectDrug> <http://covid-19.tib.eu/Annotation/{1}>.
?interaction <http://covid-19.tib.eu/vocab/precipitant_hasDrugBankID> ?effectorDrug.
?interaction <http://covid-19.tib.eu/vocab/object_hasDrugBankID> ?affectedDrug.
?effectorDrug <http://covid-19.tib.eu/vocab/drugLabel> ?effectorDrugLabel.
?affectedDrug <http://covid-19.tib.eu/vocab/drugLabel> ?affectdDrugLabel.
?interaction <http://covid-19.tib.eu/vocab/effect> ?effectCUI.
?effect <http://covid-19.tib.eu/vocab/hasCUIAnnotation> ?effectCUI.
?effect a <http://covid-19.tib.eu/vocab/AdverseEvent>.
?interaction <http://covid-19.tib.eu/vocab/impact> ?impactLabel.
}}}} UNION
{{SELECT DISTINCT ?effectorDrugLabel ?affectdDrugLabel ?effect AS ?effectLabel ?impactLabel WHERE {{
?interaction <http://covid-19.tib.eu/vocab/precipitantDrug> <http://covid-19.tib.eu/Annotation/{1}>.
?interaction <http://covid-19.tib.eu/vocab/objectDrug> <http://covid-19.tib.eu/Annotation/{0}>.
?interaction <http://covid-19.tib.eu/vocab/precipitant_hasDrugBankID> ?effectorDrug.
?interaction <http://covid-19.tib.eu/vocab/object_hasDrugBankID> ?affectedDrug.
?effectorDrug <http://covid-19.tib.eu/vocab/drugLabel> ?effectorDrugLabel.
?affectedDrug <http://covid-19.tib.eu/vocab/drugLabel> ?affectdDrugLabel.
?interaction <http://covid-19.tib.eu/vocab/effect> ?effectCUI.
?effect <http://covid-19.tib.eu/vocab/hasCUIAnnotation> ?effectCUI.
?effect a <http://covid-19.tib.eu/vocab/AdverseEvent>.
?interaction <http://covid-19.tib.eu/vocab/impact> ?impactLabel.
}}}}}}
"""
# Predicted interactions between a pair of drugs; same {0}/{1} format and
# two-way UNION as the query above.
QUERY_DRUGS_TO_DRUGS_INTERACTIONS_PREDICTED ="""
SELECT * {{
{{SELECT DISTINCT ?effectorDrugLabel ?affectdDrugLabel ?confidence ?provenance WHERE {{
?interaction <http://covid-19.tib.eu/vocab/interactor1> ?effectorDrug.
?interaction <http://covid-19.tib.eu/vocab/interactor2> ?affectedDrug.
?effectorDrug <http://covid-19.tib.eu/vocab/hasCUIAnnotation> <http://covid-19.tib.eu/Annotation/{0}>.
?affectedDrug <http://covid-19.tib.eu/vocab/hasCUIAnnotation> <http://covid-19.tib.eu/Annotation/{1}>.
?effectorDrug <http://covid-19.tib.eu/vocab/drugLabel> ?effectorDrugLabel.
?affectedDrug <http://covid-19.tib.eu/vocab/drugLabel> ?affectdDrugLabel.
?interaction <http://covid-19.tib.eu/vocab/confidence> ?confidence.
?interaction <http://covid-19.tib.eu/vocab/predictionMethod> ?provenance.
}}}} UNION
{{SELECT DISTINCT ?effectorDrugLabel ?affectdDrugLabel ?confidence ?provenance WHERE {{
?interaction <http://covid-19.tib.eu/vocab/interactor1> ?effectorDrug.
?interaction <http://covid-19.tib.eu/vocab/interactor2> ?affectedDrug.
?effectorDrug <http://covid-19.tib.eu/vocab/hasCUIAnnotation> <http://covid-19.tib.eu/Annotation/{1}>.
?affectedDrug <http://covid-19.tib.eu/vocab/hasCUIAnnotation> <http://covid-19.tib.eu/Annotation/{0}>.
?effectorDrug <http://covid-19.tib.eu/vocab/drugLabel> ?effectorDrugLabel.
?affectedDrug <http://covid-19.tib.eu/vocab/drugLabel> ?affectdDrugLabel.
?interaction <http://covid-19.tib.eu/vocab/confidence> ?confidence.
?interaction <http://covid-19.tib.eu/vocab/predictionMethod> ?provenance.
}}}}}}
"""
############################
#
# Query generation
#
############################
def execute_query(query, limit, page):
    """Run a SPARQL query against the KG endpoint and return its bindings.

    A ``LIMIT``/``OFFSET`` clause is appended only when ``limit`` is
    non-zero; ``page`` is used verbatim as the offset value.
    """
    if limit != 0:
        query += "LIMIT {0} OFFSET {1}".format(limit, page)
    endpoint = SPARQLWrapper(KG)
    endpoint.setQuery(query)
    endpoint.setReturnFormat(JSON)
    return endpoint.query().convert()['results']['bindings']
############################
#
# Processing results
#
############################
def drug2_interactions_query(drug, limit, page):
    """Fetch known DDIs in which ``drug`` (a CUI) is the affected drug."""
    filter_clause = (
        "FILTER(?affectdDrugCUI in ("
        "<http://covid-19.tib.eu/Annotation/" + drug + ">"
        "))}"
    )
    return execute_query(
        QUERY_DRUG_TO_DRUGS_INTERACTIONS + filter_clause, limit, page)
def drug2_interactions_predicted_query(drug, limit, page):
    """Fetch predicted DDIs in which ``drug`` (a CUI) is the affected drug."""
    filter_clause = (
        "FILTER(?affectdDrugCUI in ("
        "<http://covid-19.tib.eu/Annotation/" + drug + ">"
        "))}"
    )
    return execute_query(
        QUERY_DRUG_TO_DRUGS_INTERACTIONS_PREDICTED + filter_clause, limit, page)
def drugs2_interactions_query(drug_pairs, limit, page):
    """Fetch known DDIs between the two CUIs in ``drug_pairs`` (both directions)."""
    query = QUERY_DRUGS_TO_DRUGS_INTERACTIONS.format(drug_pairs[0], drug_pairs[1])
    return execute_query(query, limit, page)
def drugs2_interactions_predicted_query(drug_pairs, limit, page):
    """Fetch predicted DDIs between the two CUIs in ``drug_pairs`` (both directions)."""
    query = QUERY_DRUGS_TO_DRUGS_INTERACTIONS_PREDICTED.format(
        drug_pairs[0], drug_pairs[1])
    return execute_query(query, limit, page)
def proccesing_response(input_dicc, target,limit,page,sort):
    """Build the exploration response for the posted CUI lists.

    ``input_dicc`` maps a category name (only 'Drugs' is handled here) to a
    list of CUIs.  ``target`` selects the query type: DDI / DDIS (known,
    single drug / pair) or DDIP / DDIPS (predicted, single drug / pair).
    ``limit`` and ``page`` are forwarded to the SPARQL queries; ``sort`` is
    accepted but not used in this function.

    Returns ``{'Interactions': {...}}`` keyed by drug CUI (DDI/DDIP) or by
    the str() of the drug pair tuple (DDIS/DDIPS).
    """
    cuis=dict()
    results=dict()
    drugInteractions=dict()
    for elem in input_dicc:
        lcuis = input_dicc[elem]
        if len(lcuis)==0:
            continue
        # Remember which category each CUI came from.
        for item in lcuis:
            cuis[item]=elem
        # NOTE(review): cuis was just populated from a non-empty list, so
        # this guard can never fire — effectively a no-op.
        if len(cuis)==0:
            continue
        ############################Interactions#####################################
        if elem=='Drugs':
            if target=="DDI":
                # Known interactions affecting each single drug.
                for drug in lcuis:
                    query_reslut=drug2_interactions_query(drug,limit,page)
                    drugInteractions[drug]=dict()
                    if len(query_reslut)>0:
                        drugInteractions[drug]["Label"]=query_reslut[0]["affectdDrugLabel"]["value"]
                        drugInteractions[drug]["DDI"]=[]
                        for result in query_reslut:
                            interaction=dict()
                            interaction["effectorDrug"]=result["effectorDrugLabel"]["value"]
                            interaction["affectdDrug"]=result["affectdDrugLabel"]["value"]
                            # Strip the URI prefix and any trailing _suffix to get the bare effect id.
                            interaction["effect"]=result["effectLabel"]["value"].replace('http://covid-19.tib.eu/AdverseEvent/','').split('_')[0]
                            interaction["impact"]=result["impactLabel"]["value"]
                            drugInteractions[drug]["DDI"].append(interaction)
            elif target=="DDIS":
                # Known interactions between every unordered pair of distinct drugs.
                drugs_pairs=[(x,y) for x,y in list(itertools.product(lcuis, lcuis)) if x!=y and x<y]
                for drug_pair in drugs_pairs :
                    query_reslut=drugs2_interactions_query(drug_pair,limit,page)
                    drugInteractions[str(drug_pair)]=dict()
                    if len(query_reslut)>0:
                        drugInteractions[str(drug_pair)]["Labels"]=query_reslut[0]["affectdDrugLabel"]["value"]+" AND "+query_reslut[0]["effectorDrugLabel"]["value"]
                        drugInteractions[str(drug_pair)]["DDIS"]=[]
                        for result in query_reslut:
                            interaction=dict()
                            interaction["effectorDrug"]=result["effectorDrugLabel"]["value"]
                            interaction["affectdDrug"]=result["affectdDrugLabel"]["value"]
                            interaction["effect"]=result["effectLabel"]["value"].replace('http://covid-19.tib.eu/AdverseEvent/','').split('_')[0]
                            interaction["impact"]=result["impactLabel"]["value"]
                            # Deduplicate: the UNION query can return the same row twice.
                            if interaction not in drugInteractions[str(drug_pair)]["DDIS"]:
                                drugInteractions[str(drug_pair)]["DDIS"].append(interaction)
            elif target=="DDIP":
                #drugs_pairs=[(x,y) for x,y in list(itertools.product(lcuis, lcuis)) if x!=y]
                # Predicted interactions affecting each single drug.
                for drug in lcuis:
                    query_reslut=drug2_interactions_predicted_query(drug,limit,page)
                    drugInteractions[drug]=dict()
                    if len(query_reslut)>0:
                        drugInteractions[drug]["Label"]=query_reslut[0]["affectdDrugLabel"]["value"]
                        drugInteractions[drug]["DDIP"]=[]
                        for result in query_reslut:
                            interaction=dict()
                            interaction["effectorDrug"]=result["effectorDrugLabel"]["value"]
                            interaction["affectdDrug"]=result["affectdDrugLabel"]["value"]
                            interaction["confidence"]=result["confidence"]["value"]
                            interaction["provenance"]=result["provenance"]["value"]
                            drugInteractions[drug]["DDIP"].append(interaction)
            elif target=="DDIPS":
                # Predicted interactions between every unordered pair of distinct drugs.
                drugs_pairs=[(x,y) for x,y in list(itertools.product(lcuis, lcuis)) if x!=y and x<y]
                for drug_pair in drugs_pairs :
                    query_reslut=drugs2_interactions_predicted_query(drug_pair,limit,page)
                    drugInteractions[str(drug_pair)]=dict()
                    if len(query_reslut)>0:
                        drugInteractions[str(drug_pair)]["Labels"]=query_reslut[0]["affectdDrugLabel"]["value"]+" AND "+query_reslut[0]["effectorDrugLabel"]["value"]
                        drugInteractions[str(drug_pair)]["DDIPS"]=[]
                        for result in query_reslut:
                            interaction=dict()
                            interaction["effectorDrug"]=result["effectorDrugLabel"]["value"]
                            interaction["affectdDrug"]=result["affectdDrugLabel"]["value"]
                            interaction["confidence"]=result["confidence"]["value"]
                            interaction["provenance"]=result["provenance"]["value"]
                            drugInteractions[str(drug_pair)]["DDIPS"].append(interaction)
    results['Interactions']=drugInteractions
    return results
@app.route('/covid19kg-exp', methods=['POST'])
def run_exploration_api():
    """Handle POST /covid19kg-exp and return the exploration result as JSON.

    Query-string parameters: ``limit`` and ``page`` (integers, default 0),
    ``sort`` (default 0) and ``target`` (required; ``Pub`` delegates to
    ``get_publication``, everything else goes through
    ``proccesing_response``).  The JSON body maps category names to CUI
    lists.  Requests without a JSON body or without ``target`` get a 400.
    """
    if not request.json:
        abort(400)
    limit = int(request.args['limit']) if 'limit' in request.args else 0
    page = int(request.args['page']) if 'page' in request.args else 0
    sort = request.args.get('sort', 0)
    if 'target' not in request.args:
        abort(400)
    target = request.args['target']
    input_list = request.json
    if not input_list:
        logger.info("Error in the input format")
        r = "{results: 'Error in the input format'}"
    else:
        if target != "Pub":
            payload = proccesing_response(input_list, target, limit, page, sort)
        else:
            payload = get_publication.process(input_list, KG)
        r = json.dumps(payload, indent=4)
    logger.info("Sending the results: ")
    resp = make_response(r, 200)
    resp.mimetype = "application/json"
    return resp
def main(*args):
    """Start the Flask app, binding to ``args[0]`` when exactly one arg is given."""
    myhost = args[0] if len(args) == 1 else "0.0.0.0"
    app.run(debug=False, host=myhost)
if __name__ == '__main__':
    # CLI entry point: an optional single argument is the host to bind to.
    main(*sys.argv[1:])
| 50.855769
| 165
| 0.529022
| 1,432
| 15,867
| 5.759078
| 0.120112
| 0.070935
| 0.086698
| 0.110343
| 0.798472
| 0.771068
| 0.756396
| 0.749485
| 0.729235
| 0.729235
| 0
| 0.018526
| 0.336611
| 15,867
| 311
| 166
| 51.019293
| 0.764963
| 0.015882
| 0
| 0.525862
| 0
| 0.034483
| 0.551116
| 0.002863
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.034483
| 0
| 0.099138
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b787430d2bc3052bbe09a36827d7163bce164209
| 150
|
py
|
Python
|
Python-Advanced/file_handling/file_delete.py
|
Xamaneone/SoftUni-Intro
|
985fe3249cd2adf021c2003372e840219811d989
|
[
"MIT"
] | null | null | null |
Python-Advanced/file_handling/file_delete.py
|
Xamaneone/SoftUni-Intro
|
985fe3249cd2adf021c2003372e840219811d989
|
[
"MIT"
] | null | null | null |
Python-Advanced/file_handling/file_delete.py
|
Xamaneone/SoftUni-Intro
|
985fe3249cd2adf021c2003372e840219811d989
|
[
"MIT"
] | null | null | null |
import os

# Remove the scratch file if it is still present; otherwise report it gone.
TARGET_FILE = 'File Writer/my_first_file.txt'

if os.path.exists(TARGET_FILE):
    os.remove(TARGET_FILE)
else:
    print('File already deleted!')
| 25
| 51
| 0.726667
| 25
| 150
| 4.2
| 0.6
| 0.190476
| 0.228571
| 0.32381
| 0.457143
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126667
| 150
| 6
| 52
| 25
| 0.801527
| 0
| 0
| 0
| 0
| 0
| 0.523179
| 0.317881
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0.2
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b79866cfb4d2d5d4a52a30cf2eaba22e1663994e
| 66
|
py
|
Python
|
ILPSumNLP/core/__init__.py
|
dnanhkhoa/ILPSumNLM
|
6a8a1aa6c242f135d6844fed21760a1f9c03684a
|
[
"Apache-2.0"
] | null | null | null |
ILPSumNLP/core/__init__.py
|
dnanhkhoa/ILPSumNLM
|
6a8a1aa6c242f135d6844fed21760a1f9c03684a
|
[
"Apache-2.0"
] | null | null | null |
ILPSumNLP/core/__init__.py
|
dnanhkhoa/ILPSumNLM
|
6a8a1aa6c242f135d6844fed21760a1f9c03684a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf8 -*-
from .wordgraph import *
| 16.5
| 24
| 0.621212
| 8
| 66
| 5.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0.151515
| 66
| 3
| 25
| 22
| 0.714286
| 0.560606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b7ba897a38d394df17fb1391723e05e59a594af7
| 76
|
py
|
Python
|
IA/Python/4/4.2/2.py
|
worthl3ss/random-small
|
ffb60781f57eb865acbd81aaa07056046bad32fe
|
[
"MIT"
] | 1
|
2022-02-23T12:47:00.000Z
|
2022-02-23T12:47:00.000Z
|
IA/Python/4/4.2/2.py
|
worthl3ss/random-small
|
ffb60781f57eb865acbd81aaa07056046bad32fe
|
[
"MIT"
] | null | null | null |
IA/Python/4/4.2/2.py
|
worthl3ss/random-small
|
ffb60781f57eb865acbd81aaa07056046bad32fe
|
[
"MIT"
] | null | null | null |
import re
print(re.compile("[a-zA-Z]*[0-9]{3,}[a-zA-Z]*").findall(input()))
| 25.333333
| 65
| 0.592105
| 16
| 76
| 2.8125
| 0.75
| 0.133333
| 0.177778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041096
| 0.039474
| 76
| 2
| 66
| 38
| 0.575342
| 0
| 0
| 0
| 0
| 0
| 0.355263
| 0.355263
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
b7e04d4e59132da58a5653c68a9e51f3cdf11b5f
| 44
|
py
|
Python
|
hello-python.py
|
mtb-beta/Hello-Github
|
765d6e362ee142707bc350ed3b697036c277daca
|
[
"MIT"
] | null | null | null |
hello-python.py
|
mtb-beta/Hello-Github
|
765d6e362ee142707bc350ed3b697036c277daca
|
[
"MIT"
] | 1
|
2021-09-19T09:16:14.000Z
|
2021-09-19T09:16:14.000Z
|
hello-python.py
|
mtb-beta/Hello-Github
|
765d6e362ee142707bc350ed3b697036c277daca
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-

# Greet the repository on stdout.
message = "Hello Github."
print(message)
| 11
| 22
| 0.590909
| 6
| 44
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.090909
| 44
| 3
| 23
| 14.666667
| 0.625
| 0.409091
| 0
| 0
| 0
| 0
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
4d0fd2724af76d5c34b082b25e8fae89ff294a34
| 90
|
py
|
Python
|
enthought/chaco/base_data_range.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/chaco/base_data_range.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/chaco/base_data_range.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from chaco.base_data_range import *
| 22.5
| 38
| 0.844444
| 13
| 90
| 5.307692
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122222
| 90
| 3
| 39
| 30
| 0.873418
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4d217c715e7f423da33ba77ab900f21c473ed2b2
| 677
|
py
|
Python
|
tests/test_io/test_ioresult_container/test_ioresult_equals.py
|
internetimagery/returns
|
8f4f23bae6861fb3969a8d0c8979f96c5589fc61
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_io/test_ioresult_container/test_ioresult_equals.py
|
internetimagery/returns
|
8f4f23bae6861fb3969a8d0c8979f96c5589fc61
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_io/test_ioresult_container/test_ioresult_equals.py
|
internetimagery/returns
|
8f4f23bae6861fb3969a8d0c8979f96c5589fc61
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import absolute_import
from returns.io import IOFailure, IOSuccess
def test_equals():
    u"""``.equals`` is truthy for matching container type and inner value."""
    wrapped = 1
    assert IOSuccess(wrapped).equals(IOSuccess(wrapped))
    assert IOFailure(wrapped).equals(IOFailure(wrapped))
def test_not_equals():
    u"""``.equals`` is falsy when container type or inner value differ."""
    wrapped = 1
    assert not IOSuccess(wrapped).equals(IOFailure(wrapped))
    assert not IOSuccess(wrapped).equals(IOSuccess(0))
    assert not IOFailure(wrapped).equals(IOSuccess(wrapped))
    assert not IOFailure(wrapped).equals(IOFailure(0))
| 32.238095
| 68
| 0.750369
| 88
| 677
| 5.545455
| 0.261364
| 0.245902
| 0.196721
| 0.153689
| 0.82377
| 0.75
| 0.418033
| 0.25
| 0.25
| 0.25
| 0
| 0.006849
| 0.137371
| 677
| 20
| 69
| 33.85
| 0.828767
| 0.143279
| 0
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.142857
| false
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4d3703f97eac5c695153a94bf4428a54c8c72552
| 207
|
py
|
Python
|
src/apps/sistema/admin.py
|
Ramirocho/agricultores
|
e181069a3881eed36e76247bed1cffe469cec1a6
|
[
"bzip2-1.0.6"
] | null | null | null |
src/apps/sistema/admin.py
|
Ramirocho/agricultores
|
e181069a3881eed36e76247bed1cffe469cec1a6
|
[
"bzip2-1.0.6"
] | null | null | null |
src/apps/sistema/admin.py
|
Ramirocho/agricultores
|
e181069a3881eed36e76247bed1cffe469cec1a6
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.contrib import admin
from apps.sistema.models import registro,compra,tarjetas
# Expose the sistema models in the Django admin site (registration order
# matches the original explicit calls).
for model in (registro, compra, tarjetas):
    admin.site.register(model)
| 29.571429
| 56
| 0.830918
| 29
| 207
| 5.931034
| 0.517241
| 0.156977
| 0.296512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077295
| 207
| 7
| 57
| 29.571429
| 0.900524
| 0.125604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4ddd2208d16e20adce840c82eb5894abc32b5df7
| 50,683
|
py
|
Python
|
os_brick/tests/initiator/connectors/test_nvmeof.py
|
Mionsz/os-brick
|
7a6a09fc84a779c3ee08d122664f941195eeab8f
|
[
"Apache-2.0"
] | 61
|
2015-02-17T16:18:45.000Z
|
2021-09-16T18:46:01.000Z
|
os_brick/tests/initiator/connectors/test_nvmeof.py
|
Mionsz/os-brick
|
7a6a09fc84a779c3ee08d122664f941195eeab8f
|
[
"Apache-2.0"
] | 2
|
2016-06-17T19:46:03.000Z
|
2022-02-16T02:29:02.000Z
|
os_brick/tests/initiator/connectors/test_nvmeof.py
|
Mionsz/os-brick
|
7a6a09fc84a779c3ee08d122664f941195eeab8f
|
[
"Apache-2.0"
] | 62
|
2015-04-14T19:32:25.000Z
|
2022-03-31T16:32:53.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import builtins
import glob
import os.path
from unittest import mock
import ddt
from oslo_concurrency import processutils as putils
from os_brick import exception
from os_brick import executor
from os_brick.initiator.connectors import nvmeof
from os_brick.initiator import linuxscsi
from os_brick.privileged import rootwrap as priv_rootwrap
from os_brick.tests.initiator import test_connector
# Fixture constants shared by the NVMe-oF connector test cases below.
TARGET_NQN = 'target.nqn'
EXECUTOR = executor.Executor(None)
VOL_UUID = 'c20aba21-6ef6-446b-b374-45733b4883ba'
NVME_DEVICE_PATH = '/dev/nvme1'
NVME_NS_PATH = '/dev/nvme1n1'
NVME_DEVICE_NGUID = '4941ef7595b8ee978ccf096800f205c6'
SYS_UUID = '9126E942-396D-11E7-B0B7-A81E84C186D1'
HOST_UUID = 'c20aba21-6ef6-446b-b374-45733b4883ba'
HOST_NQN = 'nqn.2014-08.org.nvmexpress:uuid:' \
           'beaae2de-3a97-4be1-a739-6ac4bc5bf138'
# Three single-portal replicas used by the replicated-volume tests.
volume_replicas = [{'target_nqn': 'fakenqn1', 'vol_uuid': 'fakeuuid1',
                    'portals': [('10.0.0.1', 4420, 'tcp')]},
                   {'target_nqn': 'fakenqn2', 'vol_uuid': 'fakeuuid2',
                    'portals': [('10.0.0.2', 4420, 'tcp')]},
                   {'target_nqn': 'fakenqn3', 'vol_uuid': 'fakeuuid3',
                    'portals': [('10.0.0.3', 4420, 'tcp')]}]
connection_properties = {
    'alias': 'fakealias',
    'vol_uuid': 'fakevoluuid',
    'volume_replicas': volume_replicas
}
fake_portal = ('fake', 'portal', 'tcp')
# Canned "nvme list-subsystems" JSON output used to stub command execution.
nvme_list_subsystems_stdout = """
{
"Subsystems" : [
{
"Name" : "nvme-subsys0",
"NQN" : "nqn.2016-06.io.spdk:cnode1"
},
{
"Paths" : [
{
"Name" : "nvme0",
"Transport" : "tcp",
"Address" : "traddr=10.0.2.15 trsvcid=4420"
}
]
},
{
"Name" : "nvme-subsys1",
"NQN" : "nqn.2016-06.io.spdk:cnode2"
},
{
"Paths" : [
{
"Name" : "nvme1",
"Transport" : "rdma",
"Address" : "traddr=10.0.2.16 trsvcid=4420"
},
{
"Name" : "nvme2",
"Transport" : "rdma",
"Address" : "traddr=10.0.2.17 trsvcid=4420"
}
]
}
]
}
"""
# Canned "nvme list" tabular output used to stub command execution.
nvme_list_stdout = """
Node SN Model Namespace Usage Format FW Rev
------------- ------- ----- --------- ---------------- ----------- -------
/dev/nvme0n1 AB12345 s123 12682 0.00 B / 2.15 GB 512 B + 0 B 2.1.0.0
/dev/nvme0n2 AB12345 s123 12683 0.00 B / 1.07 GB 512 B + 0 B 2.1.0.0
"""
@ddt.ddt
class NVMeOFConnectorTestCase(test_connector.ConnectorTestCase):
"""Test cases for NVMe initiator class."""
    def setUp(self):
        """Create a connector with a stubbed executor and multipath disabled."""
        super(NVMeOFConnectorTestCase, self).setUp()
        self.connector = nvmeof.NVMeOFConnector(None,
                                                execute=self.fake_execute,
                                                use_multipath=False)
    @mock.patch.object(priv_rootwrap, 'custom_execute', autospec=True)
    def test_nvme_present(self, mock_execute):
        # With the privileged execute mocked to succeed, the nvme CLI is
        # reported as present.
        nvme_present = self.connector.nvme_present()
        self.assertTrue(nvme_present)
    @ddt.data(OSError(2, 'FileNotFoundError'), Exception())
    @mock.patch('os_brick.initiator.connectors.nvmeof.LOG')
    @mock.patch.object(priv_rootwrap, 'custom_execute', autospec=True)
    def test_nvme_present_exception(self, exc, mock_execute, mock_log):
        # Execution failures are logged (debug level for OSError, warning
        # otherwise) and reported as "nvme not present" instead of raised.
        mock_execute.side_effect = exc
        nvme_present = self.connector.nvme_present()
        log = mock_log.debug if isinstance(exc, OSError) else mock_log.warning
        log.assert_called_once()
        self.assertFalse(nvme_present)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_execute', autospec=True)
    def test_get_sysuuid_without_newline(self, mock_execute):
        # The trailing newline from the command output must be stripped.
        mock_execute.return_value = (
            "9126E942-396D-11E7-B0B7-A81E84C186D1\n", "")
        uuid = self.connector._get_host_uuid()
        expected_uuid = "9126E942-396D-11E7-B0B7-A81E84C186D1"
        self.assertEqual(expected_uuid, uuid)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_execute', autospec=True)
    def test_get_sysuuid_err(self, mock_execute):
        # A failing uuid lookup command yields None rather than an exception.
        mock_execute.side_effect = putils.ProcessExecutionError()
        uuid = self.connector._get_host_uuid()
        self.assertIsNone(uuid)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'nvme_present',
                       return_value=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_host_nqn',
                       return_value='fakenqn')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_system_uuid',
                       return_value=None)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_host_uuid',
                       return_value=None)
    def test_get_connector_properties_without_sysuuid(self, mock_host_uuid,
                                                      mock_sysuuid, mock_nqn,
                                                      mock_nvme_present):
        # When no uuid sources are available, only the nqn is reported.
        props = self.connector.get_connector_properties('sudo')
        expected_props = {'nqn': 'fakenqn'}
        self.assertEqual(expected_props, props)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'nvme_present')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_host_nqn', autospec=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_system_uuid',
                       autospec=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_host_uuid', autospec=True)
    def test_get_connector_properties_with_sysuuid(self, mock_host_uuid,
                                                   mock_sysuuid, mock_nqn,
                                                   mock_nvme_present):
        # With all uuid sources available, system uuid, nqn and host uuid
        # are all included in the connector properties.
        mock_host_uuid.return_value = HOST_UUID
        mock_sysuuid.return_value = SYS_UUID
        mock_nqn.return_value = HOST_NQN
        mock_nvme_present.return_value = True
        props = self.connector.get_connector_properties('sudo')
        expected_props = {"system uuid": SYS_UUID, "nqn": HOST_NQN,
                          "uuid": HOST_UUID}
        self.assertEqual(expected_props, props)
    def test_get_volume_paths_unreplicated(self):
        """No paths are returned for unreplicated props with no device."""
        self.assertEqual(self.connector.get_volume_paths(
            {'target_nqn': 'fakenqn', 'vol_uuid': 'fakeuuid',
             'portals': [('fake', 'portal', 'tcp')]}), [])
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    def test_get_volume_paths_single(self, mock_get_device_path):
        """A single replica resolves to the device path of that replica."""
        mock_get_device_path.return_value = '/dev/nvme1n1'
        connection_properties = {
            'alias': 'fakealias',
            'volume_replicas': [volume_replicas[0]]
        }
        self.assertEqual(self.connector.get_volume_paths(
            connection_properties),
            ['/dev/nvme1n1'])
        mock_get_device_path.assert_called_with(
            self.connector, volume_replicas[0]['target_nqn'],
            volume_replicas[0]['vol_uuid'])
    def test_get_volume_paths_replicated(self):
        """Replicated props resolve to the raid device /dev/md/<alias>."""
        self.assertEqual(self.connector.get_volume_paths(
            connection_properties),
            ['/dev/md/fakealias'])
def test_get_volume_paths(self):
connection_properties = {
'device_path': '/dev/md/fakealias'
}
self.assertEqual(self.connector.get_volume_paths(
connection_properties),
[connection_properties['device_path']])
    @mock.patch.object(nvmeof.NVMeOFConnector, '_execute', autospec=True)
    def test__try_connect_nvme_idempotent(self, mock_execute):
        """An exit code 70 from nvme connect is swallowed (already connected).

        The command must still be attempted exactly once as root.
        """
        cmd = [
            'nvme', 'connect',
            '-t', 'tcp',
            '-n', TARGET_NQN,
            '-a', 'portal',
            '-s', 4420]
        mock_execute.side_effect = putils.ProcessExecutionError(exit_code=70)
        self.connector._try_connect_nvme(cmd)
        mock_execute.assert_called_once_with(self.connector,
                                             *cmd,
                                             root_helper=None,
                                             run_as_root=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices')
    def test__get_device_path(self, mock_nvme_devices):
        """The new device is the one absent from the pre-connect snapshot."""
        mock_nvme_devices.return_value = ['/dev/nvme0n1',
                                          '/dev/nvme1n1',
                                          '/dev/nvme0n2']
        current_devices = ['/dev/nvme0n1', '/dev/nvme0n2']
        self.assertEqual(self.connector._get_device_path(current_devices),
                         '/dev/nvme1n1')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices')
    def test__get_device_path_no_new_device(self, mock_nvme_devices):
        """VolumePathsNotFound is raised when no new device appeared."""
        current_devices = ['/dev/nvme0n1', '/dev/nvme0n2']
        mock_nvme_devices.return_value = current_devices
        self.assertRaises(exception.VolumePathsNotFound,
                          self.connector._get_device_path,
                          current_devices)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_execute', autospec=True)
    def test__get_device_path_by_nguid(self, mock_execute):
        """Lookup by NGUID returns the device path with newline stripped."""
        mock_execute.return_value = '/dev/nvme0n1\n', None
        res = self.connector._get_device_path_by_nguid(NVME_DEVICE_NGUID)
        self.assertEqual(res, '/dev/nvme0n1')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_execute', autospec=True)
    def test__get_device_path_by_nguid_empty_response(self, mock_execute):
        """Empty command output raises VolumePathsNotFound."""
        mock_execute.return_value = None, None
        self.assertRaises(exception.VolumePathsNotFound,
                          self.connector._get_device_path_by_nguid,
                          NVME_DEVICE_NGUID)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_execute', autospec=True)
    def test__get_device_path_by_nguid_exception(self, mock_execute):
        """A failed lookup command raises VolumeDeviceNotFound."""
        mock_execute.side_effect = putils.ProcessExecutionError()
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector._get_device_path_by_nguid,
                          NVME_DEVICE_NGUID)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_connect_target_volume')
    def test_connect_volume_single_rep(
            self, mock_connect_target_volume):
        """A one-replica volume connects directly without raid handling."""
        connection_properties1 = {
            'target_nqn': 'fakenqn',
            'vol_uuid': 'fakeuuid',
            'volume_replicas': [volume_replicas[0]]
        }
        mock_connect_target_volume.return_value = '/dev/nvme0n1'
        self.assertEqual(
            self.connector.connect_volume(connection_properties1),
            {'type': 'block', 'path': '/dev/nvme0n1'})
        mock_connect_target_volume.assert_called_with(
            connection_properties1['volume_replicas'][0]['target_nqn'],
            connection_properties1['volume_replicas'][0]['vol_uuid'],
            connection_properties1['volume_replicas'][0]['portals'])
    @mock.patch.object(nvmeof.NVMeOFConnector, '_connect_target_volume')
    def test_connect_volume_unreplicated(
            self, mock_connect_target_volume):
        """Props without volume_replicas go straight to the target connect."""
        mock_connect_target_volume.return_value = '/dev/nvme0n1'
        self.assertEqual(
            self.connector._connect_volume_replicated(
                {
                    'target_nqn': 'fakenqn',
                    'vol_uuid': 'fakeuuid',
                    'portals': [('fake', 'portal', 'tcp')]
                }
            ),
            {'type': 'block', 'path': '/dev/nvme0n1'})
        mock_connect_target_volume.assert_called_with(
            'fakenqn', 'fakeuuid', [('fake', 'portal', 'tcp')])
    @mock.patch.object(nvmeof.NVMeOFConnector, '_handle_replicated_volume')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_connect_target_volume')
    def test_connect_volume_replicated(
            self, mock_connect_target_volume, mock_replicated_volume):
        """Every replica is connected, then the raid device is assembled."""
        mock_connect_target_volume.side_effect = (
            '/dev/nvme0n1', '/dev/nvme1n2', '/dev/nvme2n1')
        mock_replicated_volume.return_value = '/dev/md/md1'
        actual = self.connector.connect_volume(connection_properties)
        mock_connect_target_volume.assert_any_call(
            'fakenqn1', 'fakeuuid1', [('10.0.0.1', 4420, 'tcp')])
        mock_connect_target_volume.assert_any_call(
            'fakenqn2', 'fakeuuid2', [('10.0.0.2', 4420, 'tcp')])
        mock_connect_target_volume.assert_any_call(
            'fakenqn3', 'fakeuuid3', [('10.0.0.3', 4420, 'tcp')])
        mock_replicated_volume.assert_called_with(
            ['/dev/nvme0n1', '/dev/nvme1n2', '/dev/nvme2n1'],
            connection_properties['alias'],
            len(connection_properties['volume_replicas']))
        self.assertEqual(actual, {'type': 'block', 'path': '/dev/md/md1'})
    @mock.patch.object(nvmeof.NVMeOFConnector, '_handle_replicated_volume')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_connect_target_volume')
    def test_connect_volume_replicated_exception(
            self, mock_connect_target_volume, mock_replicated_volume):
        """A replica connect failure surfaces as VolumeDeviceNotFound."""
        mock_connect_target_volume.side_effect = Exception()
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector.connect_volume, connection_properties)
    @mock.patch.object(linuxscsi.LinuxSCSI, 'flush_device_io', autospec=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices',
                       autospec=True)
    @mock.patch('os_brick.utils._time_sleep')
    def test_disconnect_volume_nova(self, mock_sleep,
                                    mock_devices,
                                    mock_flush):
        """Nova-style props (device_path in props) flush that device."""
        device = '/dev/nvme0n1'
        connection_properties = {'target_portal': 'portal',
                                 'target_port': 1,
                                 'nqn': 'nqn.volume_123',
                                 'device_path': device,
                                 'transport_type': 'rdma'}
        mock_devices.return_value = [device]
        self.connector.disconnect_volume(connection_properties, None)
        mock_flush.assert_called_once_with(mock.ANY, device)
    @mock.patch.object(linuxscsi.LinuxSCSI, 'flush_device_io', autospec=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices',
                       autospec=True)
    @mock.patch('os_brick.utils._time_sleep')
    def test_disconnect_volume_cinder(self, mock_sleep,
                                      mock_devices,
                                      mock_flush):
        """Cinder-style props (path in device_info) flush that device."""
        device = '/dev/nvme0n1'
        connection_properties = {'target_portal': 'portal',
                                 'target_port': 1,
                                 'nqn': 'nqn.volume_123',
                                 'transport_type': 'rdma'}
        device_info = {'path': device}
        mock_devices.return_value = [device]
        self.connector.disconnect_volume(connection_properties,
                                         device_info,
                                         ignore_errors=True)
        mock_flush.assert_called_once_with(mock.ANY, device)
    @mock.patch.object(linuxscsi.LinuxSCSI, 'flush_device_io', autospec=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices',
                       autospec=True)
    @mock.patch('os_brick.utils._time_sleep')
    def test_disconnect_volume_force_ignore_errors(self, mock_sleep,
                                                   mock_devices,
                                                   mock_flush):
        """force + ignore_errors swallows a flush failure and returns None."""
        device = '/dev/nvme0n1'
        mock_flush.side_effect = putils.ProcessExecutionError
        mock_devices.return_value = [device]
        connection_properties = {'target_portal': 'portal',
                                 'target_port': 1,
                                 'nqn': 'nqn.volume_123',
                                 'device_path': device,
                                 'transport_type': 'rdma'}
        res = self.connector.disconnect_volume(connection_properties,
                                               None,
                                               force=True,
                                               ignore_errors=True)
        self.assertIsNone(res)
        mock_flush.assert_called_once_with(mock.ANY, device)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_fs_type')
    def test_disconnect_unreplicated_volume_nova(self, mock_get_fs_type):
        """Disconnect checks the device's fs type (raid member detection)."""
        connection_properties = {
            'vol_uuid': 'fakeuuid',
            'portals': [('10.0.0.1', 4420, 'tcp')],
            'target_nqn': 'fakenqn',
            'device_path': '/dev/nvme0n1'
        }
        mock_get_fs_type.return_value = 'linux_raid_member'
        self.connector._disconnect_volume_replicated(
            connection_properties, None)
        mock_get_fs_type.assert_called_with(
            connection_properties['device_path'])
    @mock.patch.object(nvmeof.NVMeOFConnector, 'end_raid')
    def test_disconnect_replicated_volume_no_device_path(self, mock_end_raid):
        """Without device_path the raid path is derived from the alias."""
        mock_end_raid.return_value = None
        self.connector.disconnect_volume(connection_properties, None)
        device_path = '/dev/md/' + connection_properties['alias']
        mock_end_raid.assert_called_with(self.connector, device_path)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'end_raid')
    def test_disconnect_replicated_volume_nova(self, mock_end_raid):
        """An explicit device_path is used for ending the raid."""
        connection_properties = {
            'vol_uuid': 'fakeuuid',
            'volume_replicas': volume_replicas,
            'device_path': '/dev/md/md1'
        }
        self.connector.disconnect_volume(connection_properties, None)
        mock_end_raid.assert_called_with(self.connector, '/dev/md/md1')
    def test_disconnect_unreplicated_volume_cinder(self):
        """Unreplicated disconnect with device_info completes cleanly."""
        connection_properties = {
            'vol_uuid': 'fakeuuid',
            'portals': [('10.0.0.1', 4420, 'tcp')],
            'target_nqn': 'fakenqn',
        }
        device_info = {'path': '/dev/nvme0n1'}
        self.connector._disconnect_volume_replicated(
            connection_properties, device_info, ignore_errors=True)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'end_raid')
    def test_disconnect_replicated_volume_cinder(self, mock_end_raid):
        """The raid is ended using the path from device_info."""
        device_info = {'path': '/dev/md/md1'}
        self.connector.disconnect_volume(connection_properties,
                                         device_info,
                                         ignore_errors=True)
        mock_end_raid.assert_called_with(self.connector, '/dev/md/md1')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    @mock.patch.object(linuxscsi.LinuxSCSI, 'get_device_size')
    def test_extend_volume_unreplicated(
            self, mock_device_size, mock_device_path):
        """Extending a one-replica volume sizes the replica's device."""
        connection_properties = {
            'target_nqn': 'fakenqn',
            'vol_uuid': 'fakeuuid',
            'volume_replicas': [volume_replicas[0]]
        }
        mock_device_path.return_value = '/dev/nvme0n1'
        mock_device_size.return_value = 100
        self.assertEqual(
            self.connector.extend_volume(connection_properties),
            100)
        mock_device_path.assert_called_with(
            self.connector, volume_replicas[0]['target_nqn'],
            volume_replicas[0]['vol_uuid'])
        mock_device_size.assert_called_with('/dev/nvme0n1')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    @mock.patch.object(linuxscsi.LinuxSCSI, 'get_device_size')
    def test_extend_volume_unreplicated_no_replica(
            self, mock_device_size, mock_device_path):
        """Props without volume_replicas resolve via target_nqn/vol_uuid."""
        connection_properties = {
            'target_nqn': 'fakenqn',
            'vol_uuid': 'fakeuuid'
        }
        mock_device_path.return_value = '/dev/nvme0n1'
        mock_device_size.return_value = 100
        self.assertEqual(
            self.connector._extend_volume_replicated(
                connection_properties), 100)
        mock_device_path.assert_called_with(
            self.connector, 'fakenqn', 'fakeuuid')
        mock_device_size.assert_called_with('/dev/nvme0n1')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_mdadm')
    @mock.patch.object(linuxscsi.LinuxSCSI, 'get_device_size')
    def test_extend_volume_replicated(
            self, mock_device_size, mock_mdadm):
        """A replicated volume is grown via mdadm before it is sized."""
        mock_device_size.return_value = 100
        self.assertEqual(
            self.connector.extend_volume(connection_properties),
            100)
        device_path = '/dev/md/' + connection_properties['alias']
        mock_mdadm.assert_called_with(
            self.connector, ['mdadm', '--grow', '--size', 'max', device_path])
        mock_device_size.assert_called_with(device_path)
    @mock.patch.object(linuxscsi.LinuxSCSI, 'get_device_size')
    def test_extend_volume_with_nguid(self, mock_device_size):
        """Props carrying volume_nguid size the provided device_path."""
        device_path = '/dev/nvme0n1'
        connection_properties = {
            'volume_nguid': NVME_DEVICE_NGUID,
            'device_path': device_path,
        }
        mock_device_size.return_value = 100
        self.assertEqual(
            self.connector.extend_volume(connection_properties),
            100
        )
        mock_device_size.assert_called_with(device_path)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'rescan')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    def test__connect_target_volume_with_connected_device(
            self, mock_device_path, mock_rescan):
        """An already-visible device is returned without connecting again."""
        mock_device_path.return_value = '/dev/nvme0n1'
        self.assertEqual(
            self.connector._connect_target_volume(
                'fakenqn', 'fakeuuid', [('fake', 'portal', 'tcp')]),
            '/dev/nvme0n1')
        mock_device_path.assert_called_with(
            self.connector, 'fakenqn', 'fakeuuid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'connect_to_portals')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    def test__connect_target_volume_not_connected(
            self, mock_device_path, mock_portals):
        """The lookup failure propagates even after portals connected."""
        mock_device_path.side_effect = exception.VolumeDeviceNotFound()
        mock_portals.return_value = True
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector._connect_target_volume, TARGET_NQN,
                          VOL_UUID, [('fake', 'portal', 'tcp')])
        mock_device_path.assert_called_with(
            self.connector, TARGET_NQN, VOL_UUID)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'connect_to_portals')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    def test__connect_target_volume_no_portals_con(
            self, mock_device_path, mock_portals):
        """No reachable portal means VolumeDeviceNotFound."""
        mock_device_path.return_value = None
        mock_portals.return_value = None
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector._connect_target_volume, 'fakenqn',
                          'fakeuuid', [fake_portal])
        mock_device_path.assert_called_with(
            self.connector, 'fakenqn', 'fakeuuid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'connect_to_portals')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_nvme_device_path')
    def test__connect_target_volume_new_device_path(
            self, mock_device_path, mock_connect_portal):
        """The device appears on a second lookup after connecting portals."""
        mock_device_path.side_effect = (None, '/dev/nvme0n1')
        self.assertEqual(
            self.connector._connect_target_volume(
                'fakenqn', 'fakeuuid', [('fake', 'portal', 'tcp')]),
            '/dev/nvme0n1')
        mock_connect_portal.assert_called_with(
            self.connector, 'fakenqn', [('fake', 'portal', 'tcp')])
        mock_device_path.assert_called_with(
            self.connector, 'fakenqn', 'fakeuuid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_nvme_cli')
    def test_connect_to_portals(self, mock_nvme_cli):
        """A tcp portal issues the expected nvme connect and returns True."""
        nvme_command = (
            'connect', '-a', '10.0.0.1', '-s', 4420, '-t',
            'tcp', '-n', 'fakenqn', '-Q', '128', '-l', '-1')
        self.assertEqual(
            self.connector.connect_to_portals(
                self.connector, 'fakenqn', [('10.0.0.1', 4420, 'tcp')]),
            True)
        mock_nvme_cli.assert_called_with(self.connector, nvme_command)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_nvme_cli')
    def test_connect_to_portals_rdma_no_conn(self, mock_nvme_cli):
        """RoCEv2 maps to '-t rdma' and a cli failure yields False."""
        mock_nvme_cli.side_effect = Exception()
        nvme_command = (
            'connect', '-a', '10.0.0.1', '-s', 4420, '-t',
            'rdma', '-n', 'fakenqn', '-Q', '128', '-l', '-1')
        self.assertEqual(
            self.connector.connect_to_portals(
                self.connector, 'fakenqn', [('10.0.0.1', 4420, 'RoCEv2')]),
            False)
        mock_nvme_cli.assert_called_with(self.connector, nvme_command)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'stop_and_assemble_raid')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_is_device_in_raid')
    def test_handle_replicated_volume_existing(
            self, mock_device_raid, mock_stop_assemble_raid):
        """Devices already in a raid are re-assembled, not re-created."""
        mock_device_raid.return_value = True
        self.assertEqual(
            self.connector._handle_replicated_volume(
                ['/dev/nvme1n1', '/dev/nvme1n2', '/dev/nvme1n3'],
                'fakealias', 3),
            '/dev/md/fakealias')
        mock_device_raid.assert_called_with(self.connector, '/dev/nvme1n1')
        mock_stop_assemble_raid.assert_called_with(
            self.connector, ['/dev/nvme1n1', '/dev/nvme1n2', '/dev/nvme1n3'],
            '/dev/md/fakealias', False)
    @mock.patch.object(nvmeof.NVMeOFConnector, '_is_device_in_raid')
    def test_handle_replicated_volume_not_found(
            self, mock_device_raid):
        """Fewer devices than replica count raises VolumeDeviceNotFound."""
        mock_device_raid.return_value = False
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector._handle_replicated_volume,
                          ['/dev/nvme1n1', '/dev/nvme1n2', '/dev/nvme1n3'],
                          'fakealias', 4)
        mock_device_raid.assert_any_call(self.connector, '/dev/nvme1n1')
        mock_device_raid.assert_any_call(self.connector, '/dev/nvme1n2')
        mock_device_raid.assert_any_call(self.connector, '/dev/nvme1n3')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'create_raid')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_is_device_in_raid')
    def test_handle_replicated_volume_new(
            self, mock_device_raid, mock_create_raid):
        """Devices not yet in a raid get a new raid1 created."""
        mock_device_raid.return_value = False
        self.assertEqual(
            self.connector._handle_replicated_volume(
                ['/dev/nvme1n1', '/dev/nvme1n2', '/dev/nvme1n3'],
                'fakealias', 3),
            '/dev/md/fakealias')
        mock_device_raid.assert_any_call(self.connector, '/dev/nvme1n1')
        mock_device_raid.assert_any_call(self.connector, '/dev/nvme1n2')
        mock_device_raid.assert_any_call(self.connector, '/dev/nvme1n3')
        mock_create_raid.assert_called_with(
            self.connector, ['/dev/nvme1n1', '/dev/nvme1n2', '/dev/nvme1n3'],
            '1', 'fakealias', 'fakealias', False)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'ks_readlink')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_md_name')
    def test_stop_and_assemble_raid_existing_simple(
            self, mock_md_name, mock_readlink):
        """An empty readlink result still completes without error."""
        mock_readlink.return_value = ''
        mock_md_name.return_value = 'mdalias'
        self.assertIsNone(self.connector.stop_and_assemble_raid(
            self.connector, ['/dev/sda'], '/dev/md/mdalias', False))
        mock_md_name.assert_called_with(self.connector, 'sda')
        mock_readlink.assert_called_with('/dev/md/mdalias')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'ks_readlink')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_md_name')
    def test_stop_and_assemble_raid(
            self, mock_md_name, mock_readlink):
        """A link matching the md name finishes without reassembling."""
        mock_readlink.return_value = '/dev/md/mdalias'
        mock_md_name.return_value = 'mdalias'
        self.assertIsNone(self.connector.stop_and_assemble_raid(
            self.connector, ['/dev/sda'], '/dev/md/mdalias', False))
        mock_md_name.assert_called_with(self.connector, 'sda')
        mock_readlink.assert_called_with('/dev/md/mdalias')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'assemble_raid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'ks_readlink')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'get_md_name')
    def test_stop_and_assemble_raid_err(self, mock_md_name, mock_readlink,
                                        mock_assemble):
        """An assemble failure is swallowed; the call still returns None."""
        mock_readlink.return_value = '/dev/md/mdalias'
        mock_md_name.return_value = 'dummy'
        mock_assemble.side_effect = Exception()
        self.assertIsNone(self.connector.stop_and_assemble_raid(
            self.connector, ['/dev/sda'], '/dev/md/mdalias', False))
        mock_md_name.assert_called_with(self.connector, 'sda')
        mock_readlink.assert_called_with('/dev/md/mdalias')
        mock_assemble.assert_called_with(self.connector, ['/dev/sda'],
                                         '/dev/md/mdalias', False)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_mdadm')
    def test_assemble_raid_simple(self, mock_run_mdadm):
        """assemble_raid builds the expected mdadm --assemble command."""
        self.assertEqual(self.connector.assemble_raid(
            self.connector, ['/dev/sda'], '/dev/md/md1', True), True)
        mock_run_mdadm.assert_called_with(
            self.connector,
            ['mdadm', '--assemble', '--run', '/dev/md/md1', '-o', '/dev/sda'],
            True)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_mdadm')
    def test_assemble_raid_simple_err(self, mock_run_mdadm):
        """An mdadm failure propagates out of assemble_raid."""
        mock_run_mdadm.side_effect = putils.ProcessExecutionError()
        self.assertRaises(putils.ProcessExecutionError,
                          self.connector.assemble_raid, self.connector,
                          ['/dev/sda'], '/dev/md/md1', True)
        mock_run_mdadm.assert_called_with(
            self.connector,
            ['mdadm', '--assemble', '--run', '/dev/md/md1', '-o', '/dev/sda'],
            True)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_mdadm')
    def test_create_raid_cmd_simple(self, mock_run_mdadm):
        """create_raid builds the full mdadm -C command with raid flags."""
        self.assertIsNone(self.connector.create_raid(
            self.connector, ['/dev/sda'], '1', 'md1', 'name', True))
        mock_run_mdadm.assert_called_with(
            self.connector,
            ['mdadm', '-C', '-o', 'md1', '-R', '-N', 'name', '--level', '1',
             '--raid-devices=1', '--bitmap=internal', '--homehost=any',
             '--failfast', '--assume-clean', '/dev/sda'])
    @mock.patch.object(nvmeof.NVMeOFConnector, 'stop_raid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'is_raid_exists')
    def test_end_raid_simple(self, mock_raid_exists, mock_stop_raid):
        """end_raid stops an existing raid device."""
        mock_raid_exists.return_value = True
        mock_stop_raid.return_value = False
        self.assertIsNone(self.connector.end_raid(
            self.connector, '/dev/md/md1'))
        mock_raid_exists.assert_called_with(self.connector, '/dev/md/md1')
        mock_stop_raid.assert_called_with(self.connector, '/dev/md/md1')
    @mock.patch.object(os.path, 'exists')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'stop_raid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'is_raid_exists')
    def test_end_raid(self, mock_raid_exists, mock_stop_raid, mock_os):
        """end_raid also checks the device path after stopping the raid."""
        mock_raid_exists.return_value = True
        mock_stop_raid.return_value = False
        mock_os.return_value = True
        self.assertIsNone(self.connector.end_raid(
            self.connector, '/dev/md/md1'))
        mock_raid_exists.assert_called_with(self.connector, '/dev/md/md1')
        mock_stop_raid.assert_called_with(self.connector, '/dev/md/md1')
        mock_os.assert_called_with('/dev/md/md1')
    @mock.patch.object(os.path, 'exists')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'stop_raid')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'is_raid_exists')
    def test_end_raid_err(self, mock_raid_exists, mock_stop_raid, mock_os):
        """A stop_raid failure is swallowed; end_raid still returns None."""
        mock_raid_exists.return_value = True
        mock_stop_raid.side_effect = Exception()
        mock_os.return_value = True
        self.assertIsNone(self.connector.end_raid(
            self.connector, '/dev/md/md1'))
        mock_raid_exists.assert_called_with(self.connector, '/dev/md/md1')
        mock_stop_raid.assert_called_with(self.connector, '/dev/md/md1')
        mock_os.assert_called_with('/dev/md/md1')
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_mdadm')
    def test_stop_raid_simple(self, mock_run_mdadm):
        """stop_raid runs mdadm --stop and returns its output."""
        mock_run_mdadm.return_value = 'mdadm output'
        self.assertEqual(self.connector.stop_raid(
            self.connector, '/dev/md/md1'), 'mdadm output')
        mock_run_mdadm.assert_called_with(
            self.connector, ['mdadm', '--stop', '/dev/md/md1'])
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_mdadm')
    def test_remove_raid_simple(self, mock_run_mdadm):
        """remove_raid runs mdadm --remove on the device."""
        self.assertIsNone(self.connector.remove_raid(
            self.connector, '/dev/md/md1'))
        mock_run_mdadm.assert_called_with(
            self.connector, ['mdadm', '--remove', '/dev/md/md1'])
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_nvme_cli')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_controller')
    def test_rescan(self, mock_get_nvme_controller, mock_run_nvme_cli):
        """rescan issues ns-rescan against the resolved controller."""
        mock_get_nvme_controller.return_value = 'nvme1'
        mock_run_nvme_cli.return_value = None
        result = self.connector.rescan(EXECUTOR, TARGET_NQN, VOL_UUID)
        self.assertIsNone(result)
        mock_get_nvme_controller.assert_called_with(EXECUTOR, TARGET_NQN)
        nvme_command = ('ns-rescan', NVME_DEVICE_PATH)
        mock_run_nvme_cli.assert_called_with(EXECUTOR, nvme_command)
    @mock.patch.object(nvmeof.NVMeOFConnector, 'run_nvme_cli')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_controller')
    def test_rescan_err(self, mock_get_nvme_controller, mock_run_nvme_cli):
        """A cli failure during rescan raises CommandExecutionFailed."""
        mock_get_nvme_controller.return_value = 'nvme1'
        mock_run_nvme_cli.side_effect = Exception()
        self.assertRaises(exception.CommandExecutionFailed,
                          self.connector.rescan, EXECUTOR, TARGET_NQN,
                          VOL_UUID)
        mock_get_nvme_controller.assert_called_with(EXECUTOR, TARGET_NQN)
        nvme_command = ('ns-rescan', NVME_DEVICE_PATH)
        mock_run_nvme_cli.assert_called_with(EXECUTOR, nvme_command)
    @mock.patch.object(executor.Executor, '_execute')
    def test_is_raid_exists_not(self, mock_execute):
        """Output not starting with '<device>:' means the raid is absent."""
        mock_execute.return_value = (VOL_UUID + "\n", "")
        result = self.connector.is_raid_exists(EXECUTOR, NVME_DEVICE_PATH)
        self.assertEqual(False, result)
        cmd = ['mdadm', '--detail', NVME_DEVICE_PATH]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_is_raid_exists(self, mock_execute):
        """Output starting with '<device>:' means the raid exists."""
        mock_execute.return_value = (NVME_DEVICE_PATH + ':' + "\n", "")
        result = self.connector.is_raid_exists(EXECUTOR, NVME_DEVICE_PATH)
        self.assertEqual(True, result)
        cmd = ['mdadm', '--detail', NVME_DEVICE_PATH]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_is_raid_exists_err(self, mock_execute):
        """A failed mdadm --detail is reported as raid-not-present."""
        mock_execute.side_effect = putils.ProcessExecutionError
        result = self.connector.is_raid_exists(EXECUTOR, NVME_DEVICE_PATH)
        self.assertEqual(False, result)
        cmd = ['mdadm', '--detail', NVME_DEVICE_PATH]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_get_md_name(self, mock_execute):
        """get_md_name greps /proc/mdstat and returns the stripped name."""
        mock_execute.return_value = ('nvme1' + "\n", "")
        result = self.connector.get_md_name(EXECUTOR, NVME_DEVICE_PATH)
        self.assertEqual('nvme1', result)
        get_md_cmd = 'cat /proc/mdstat | grep /dev/nvme1 | awk \'{print $1;}\''
        cmd = ['bash', '-c', get_md_cmd]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_get_md_name_err(self, mock_execute):
        """A failed mdstat lookup returns None."""
        mock_execute.side_effect = putils.ProcessExecutionError()
        result = self.connector.get_md_name(EXECUTOR, NVME_DEVICE_PATH)
        self.assertIsNone(result)
        get_md_cmd = 'cat /proc/mdstat | grep /dev/nvme1 | awk \'{print $1;}\''
        cmd = ['bash', '-c', get_md_cmd]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_is_device_in_raid(self, mock_execute):
        """mdadm --examine output starting with '<device>:' means in-raid."""
        mock_execute.return_value = (NVME_DEVICE_PATH + ':' + "\n", "")
        result = self.connector._is_device_in_raid(self.connector,
                                                   NVME_DEVICE_PATH)
        self.assertEqual(True, result)
        cmd = ['mdadm', '--examine', NVME_DEVICE_PATH]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_is_device_in_raid_not_found(self, mock_execute):
        """Non-matching mdadm --examine output means not in a raid."""
        mock_execute.return_value = (VOL_UUID + "\n", "")
        result = self.connector._is_device_in_raid(self.connector,
                                                   NVME_DEVICE_PATH)
        self.assertEqual(False, result)
        cmd = ['mdadm', '--examine', NVME_DEVICE_PATH]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_is_device_in_raid_err(self, mock_execute):
        """A failed mdadm --examine means not in a raid."""
        mock_execute.side_effect = putils.ProcessExecutionError()
        result = self.connector._is_device_in_raid(self.connector,
                                                   NVME_DEVICE_PATH)
        self.assertEqual(False, result)
        cmd = ['mdadm', '--examine', NVME_DEVICE_PATH]
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_run_mdadm(self, mock_execute):
        """run_mdadm returns the command's stdout with newline stripped."""
        mock_execute.return_value = (VOL_UUID + "\n", "")
        cmd = ['mdadm', '--examine', NVME_DEVICE_PATH]
        result = self.connector.run_mdadm(EXECUTOR, cmd)
        self.assertEqual(VOL_UUID, result)
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    def test_run_mdadm_err(self, mock_execute):
        """run_mdadm returns None when the command fails."""
        mock_execute.side_effect = putils.ProcessExecutionError()
        cmd = ['mdadm', '--examine', NVME_DEVICE_PATH]
        result = self.connector.run_mdadm(EXECUTOR, cmd)
        self.assertIsNone(result)
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
    @mock.patch.object(executor.Executor, '_execute')
    @mock.patch.object(glob, 'glob')
    @mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_controller')
    def test_get_nvme_device_path(self, mock_get_nvme_controller, mock_glob,
                                  mock_execute):
        """The namespace whose sysfs uuid matches is resolved to its path."""
        mock_get_nvme_controller.return_value = 'nvme1'
        block_dev_path = '/sys/class/nvme-fabrics/ctl/nvme1/nvme1n*'
        mock_glob.side_effect = [['/sys/class/nvme-fabrics/ctl/nvme1/nvme1n1']]
        mock_execute.return_value = (VOL_UUID + "\n", "")
        cmd = ['cat', '/sys/class/nvme-fabrics/ctl/nvme1/nvme1n1/uuid']
        result = self.connector.get_nvme_device_path(EXECUTOR, TARGET_NQN,
                                                     VOL_UUID)
        mock_get_nvme_controller.assert_called_with(EXECUTOR, TARGET_NQN)
        self.assertEqual(NVME_NS_PATH, result)
        mock_glob.assert_any_call(block_dev_path)
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
    def execute_side_effect(self, value, run_as_root, root_helper):
        """Fake sysfs reads: target nqn for 'nqn' paths, 'live' for 'state'."""
        if 'nqn' in value:
            return TARGET_NQN + "\n", ""
        if 'state' in value:
            return 'live' + "\n", ""
    def execute_side_effect_not_live(self, value, run_as_root, root_helper):
        """Fake sysfs reads where the controller state is 'dead'."""
        if 'nqn' in value:
            return TARGET_NQN + "\n", ""
        if 'state' in value:
            return 'dead' + "\n", ""
    def execute_side_effect_not_found(self, value, run_as_root, root_helper):
        """Fake sysfs reads where the controller nqn does not match."""
        if 'nqn' in value:
            return "dummy" + "\n", ""
        if 'state' in value:
            return 'live' + "\n", ""
    @mock.patch.object(executor.Executor, '_execute',
                       side_effect=execute_side_effect)
    @mock.patch.object(glob, 'glob')
    def test_get_nvme_controller(self, mock_glob, mock_execute):
        """A live controller with a matching nqn resolves to its name."""
        ctrl_path = '/sys/class/nvme-fabrics/ctl/nvme*'
        mock_glob.side_effect = [['/sys/class/nvme-fabrics/ctl/nvme1']]
        cmd = ['cat', '/sys/class/nvme-fabrics/ctl/nvme1/state']
        result = self.connector._get_nvme_controller(EXECUTOR, TARGET_NQN)
        self.assertEqual('nvme1', result)
        mock_glob.assert_any_call(ctrl_path)
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
    @mock.patch.object(executor.Executor, '_execute',
                       side_effect=execute_side_effect_not_live)
    @mock.patch.object(glob, 'glob')
    def test_get_nvme_controller_not_live(self, mock_glob, mock_execute):
        """A controller that is not 'live' raises VolumeDeviceNotFound."""
        ctrl_path = '/sys/class/nvme-fabrics/ctl/nvme*'
        mock_glob.side_effect = [['/sys/class/nvme-fabrics/ctl/nvme1']]
        cmd = ['cat', '/sys/class/nvme-fabrics/ctl/nvme1/state']
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector._get_nvme_controller, EXECUTOR,
                          TARGET_NQN)
        mock_glob.assert_any_call(ctrl_path)
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
    @mock.patch.object(executor.Executor, '_execute',
                       side_effect=execute_side_effect_not_found)
    @mock.patch.object(glob, 'glob')
    def test_get_nvme_controller_not_found(self, mock_glob, mock_execute):
        """A controller with a non-matching nqn raises VolumeDeviceNotFound."""
        ctrl_path = '/sys/class/nvme-fabrics/ctl/nvme*'
        mock_glob.side_effect = [['/sys/class/nvme-fabrics/ctl/nvme1']]
        cmd = ['cat', '/sys/class/nvme-fabrics/ctl/nvme1/state']
        self.assertRaises(exception.VolumeDeviceNotFound,
                          self.connector._get_nvme_controller, EXECUTOR,
                          TARGET_NQN)
        mock_glob.assert_any_call(ctrl_path)
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
@mock.patch.object(builtins, 'open')
def test_get_host_nqn_file_available(self, mock_open):
mock_open.return_value.__enter__.return_value.read = (
lambda: HOST_NQN + "\n")
host_nqn = self._get_host_nqn()
mock_open.assert_called_once_with('/etc/nvme/hostnqn', 'r')
self.assertEqual(HOST_NQN, host_nqn)
    @mock.patch.object(nvmeof.priv_nvme, 'create_hostnqn')
    @mock.patch.object(builtins, 'open')
    def test_get_host_nqn_io_err(self, mock_open, mock_create):
        """An IOError reading the hostnqn file falls back to creating one."""
        mock_create.return_value = mock.sentinel.nqn
        mock_open.side_effect = IOError()
        result = self.connector._get_host_nqn()
        mock_open.assert_called_once_with('/etc/nvme/hostnqn', 'r')
        mock_create.assert_called_once_with()
        self.assertEqual(mock.sentinel.nqn, result)
    @mock.patch.object(nvmeof.priv_nvme, 'create_hostnqn')
    @mock.patch.object(builtins, 'open')
    def test_get_host_nqn_err(self, mock_open, mock_create):
        """Non-IOError failures return None without trying to create one."""
        mock_open.side_effect = Exception()
        result = self.connector._get_host_nqn()
        mock_open.assert_called_once_with('/etc/nvme/hostnqn', 'r')
        mock_create.assert_not_called()
        self.assertIsNone(result)
@mock.patch.object(executor.Executor, '_execute')
def test_run_nvme_cli(self, mock_execute):
mock_execute.return_value = ("\n", "")
cmd = 'dummy command'
result = self.connector.run_nvme_cli(EXECUTOR, cmd)
self.assertEqual(("\n", ""), result)
def test_ks_readlink(self):
dest = 'dummy path'
result = self.connector.ks_readlink(dest)
self.assertEqual('', result)
    @mock.patch.object(executor.Executor, '_execute')
    def test_get_fs_type_err(self, mock_execute):
        """_get_fs_type returns None when blkid fails."""
        mock_execute.side_effect = putils.ProcessExecutionError()
        result = self.connector._get_fs_type(NVME_DEVICE_PATH)
        self.assertIsNone(result)
        cmd = ['blkid', NVME_DEVICE_PATH, '-s', 'TYPE', '-o', 'value']
        args, kwargs = mock_execute.call_args
        self.assertEqual(args[0], cmd[0])
        self.assertEqual(args[1], cmd[1])
        self.assertEqual(args[2], cmd[2])
        self.assertEqual(args[3], cmd[3])
        self.assertEqual(args[4], cmd[4])
        self.assertEqual(args[5], cmd[5])
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices')
def test__is_nvme_available(self, mock_nvme_devices):
mock_nvme_devices.return_value = {'/dev/nvme0n1',
'/dev/nvme2n1',
'/dev/nvme2n2',
'/dev/nvme3n1'}
result = self.connector._is_nvme_available('nvme2')
self.assertTrue(result)
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices')
def test__is_nvme_available_wrong_name(self, mock_nvme_devices):
mock_nvme_devices.return_value = {'/dev/nvme0n1',
'/dev/nvme2n1',
'/dev/nvme2n2',
'/dev/nvme3n1'}
self.assertRaises(exception.NotFound,
self.connector._is_nvme_available,
'nvme1')
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices')
def test__is_nvme_available_no_devices(self, mock_nvme_devices):
mock_nvme_devices.return_value = []
self.assertRaises(exception.NotFound,
self.connector._is_nvme_available,
'nvme1')
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_devices')
def test__is_nvme_available_fail_to_get_devices(self, mock_nvme_devices):
mock_nvme_devices.side_effect = exception.CommandExecutionFailed()
self.assertRaises(exception.CommandExecutionFailed,
self.connector._is_nvme_available,
'nvme1')
@mock.patch.object(executor.Executor, '_execute')
def test__get_nvme_devices(self, mock_execute):
mock_execute.return_value = nvme_list_stdout, None
res = self.connector._get_nvme_devices()
self.assertEqual(set(res), {'/dev/nvme0n1', '/dev/nvme0n2'})
@mock.patch.object(executor.Executor, '_execute')
def test__get_nvme_devices_failed(self, mock_execute):
mock_execute.side_effect = putils.ProcessExecutionError()
self.assertRaises(exception.CommandExecutionFailed,
self.connector._get_nvme_devices)
@mock.patch.object(nvmeof.NVMeOFConnector, '_is_nvme_available')
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_subsys')
def test__wait_for_blk(self, mock_nvme_subsys, mock_nvme_avail):
mock_nvme_subsys.return_value = nvme_list_subsystems_stdout, None
mock_nvme_avail.return_value = True
result = self.connector._wait_for_blk('rdma',
'nqn.2016-06.io.spdk:cnode2',
'10.0.2.16', '4420')
self.assertTrue(result)
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_subsys')
def test__wait_for_blk_cli_exception(self, mock_nvme_subsys):
mock_nvme_subsys.side_effect = putils.ProcessExecutionError()
self.assertRaises(putils.ProcessExecutionError,
self.connector._wait_for_blk,
'rdma',
'nqn.2016-06.io.spdk:cnode2',
'10.0.2.16', '4420')
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_subsys')
def test__wait_for_blk_bad_json(self, mock_nvme_subsys):
mock_nvme_subsys.return_value = ".", None
result = self.connector._wait_for_blk('rdma',
'nqn.2016-06.io.spdk:cnode2',
'10.0.2.16', '4420')
self.assertFalse(result)
@mock.patch.object(nvmeof.NVMeOFConnector, '_get_nvme_subsys')
def test__wait_for_blk_ip_not_found(self, mock_nvme_subsys):
mock_nvme_subsys.return_value = nvme_list_subsystems_stdout, None
result = self.connector._wait_for_blk('rdma',
'nqn.2016-06.io.spdk:cnode2',
'10.0.2.18', '4420')
self.assertFalse(result)
def _get_host_nqn(self):
try:
with open('/etc/nvme/hostnqn', 'r') as f:
host_nqn = f.read().strip()
f.close()
except IOError:
host_nqn = HOST_NQN
return host_nqn
| 46.755535
| 79
| 0.631415
| 5,793
| 50,683
| 5.196444
| 0.065769
| 0.060027
| 0.058798
| 0.057204
| 0.82613
| 0.785071
| 0.744677
| 0.70282
| 0.67
| 0.628442
| 0
| 0.021228
| 0.249926
| 50,683
| 1,083
| 80
| 46.798707
| 0.770623
| 0.011582
| 0
| 0.578077
| 0
| 0.002068
| 0.146765
| 0.023223
| 0
| 0
| 0
| 0
| 0.198552
| 1
| 0.092037
| false
| 0
| 0.01241
| 0
| 0.11272
| 0.002068
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
150a38160a67bc9a822b4652ca0ed47ac254daad
| 56
|
py
|
Python
|
pystocktwits/__init__.py
|
GlebRed/pystocktwits
|
48a07ba3744d8cd1d636a646be86f73c662b0317
|
[
"MIT"
] | 19
|
2019-02-05T14:23:09.000Z
|
2021-11-16T13:17:15.000Z
|
pystocktwits/__init__.py
|
StockScripts/pystocktwits
|
31c16fe67b2b80ded532a8e3fbff140ce538d4dc
|
[
"MIT"
] | 1
|
2020-01-31T22:04:30.000Z
|
2020-01-31T22:04:30.000Z
|
pystocktwits/__init__.py
|
StockScripts/pystocktwits
|
31c16fe67b2b80ded532a8e3fbff140ce538d4dc
|
[
"MIT"
] | 6
|
2020-08-20T17:26:42.000Z
|
2021-05-27T05:18:48.000Z
|
from .pystocktwits import Streamer
from .utils import *
| 18.666667
| 34
| 0.803571
| 7
| 56
| 6.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 35
| 28
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
129437085fe5575eeb0e302335d1bcd212f0d6c9
| 561
|
py
|
Python
|
fdk_client/platform/models/DisplayBreakup.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/models/DisplayBreakup.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/models/DisplayBreakup.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
"""Platform Models."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
class DisplayBreakup(BaseSchema):
# Cart swagger.json
message = fields.List(fields.Str(required=False), required=False)
currency_code = fields.Str(required=False)
key = fields.Str(required=False)
value = fields.Float(required=False)
display = fields.Str(required=False)
currency_symbol = fields.Str(required=False)
| 14.763158
| 69
| 0.682709
| 62
| 561
| 6.145161
| 0.451613
| 0.238845
| 0.223097
| 0.288714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.219251
| 561
| 37
| 70
| 15.162162
| 0.869863
| 0.062389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.363636
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1299878a3ee439134c4d3ca32e8c3de0bb5e920e
| 146
|
py
|
Python
|
idm/commands/signals/__init__.py
|
ruslanvolov6667/ID
|
d2e2e0d8764bb9bc9caad163d69de03972cb5380
|
[
"MIT"
] | null | null | null |
idm/commands/signals/__init__.py
|
ruslanvolov6667/ID
|
d2e2e0d8764bb9bc9caad163d69de03972cb5380
|
[
"MIT"
] | null | null | null |
idm/commands/signals/__init__.py
|
ruslanvolov6667/ID
|
d2e2e0d8764bb9bc9caad163d69de03972cb5380
|
[
"MIT"
] | null | null | null |
from .ping import ping
from .info import sinfo
from .repiat import repiat
from .test import test
from .wikiosif import WIadd, WIhelp, WIremove
| 29.2
| 45
| 0.780822
| 22
| 146
| 5.181818
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171233
| 146
| 5
| 45
| 29.2
| 0.942149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
12c32db90b7a3976a4e8699de22e346a13a7fdd8
| 129
|
py
|
Python
|
cptk/defaults/templates/py/.cptk/template/{{slug(problem.name)}}.py
|
RealA10N/cptk
|
e500d948e91bb70661adc3c2539b149704c734a1
|
[
"Apache-2.0"
] | 5
|
2021-12-25T01:49:45.000Z
|
2022-03-27T10:30:14.000Z
|
cptk/defaults/templates/py/.cptk/template/{{slug(problem.name)}}.py
|
RealA10N/cptk
|
e500d948e91bb70661adc3c2539b149704c734a1
|
[
"Apache-2.0"
] | 39
|
2021-12-24T16:35:07.000Z
|
2022-03-18T23:15:14.000Z
|
cptk/defaults/templates/py/.cptk/template/{{slug(problem.name)}}.py
|
RealA10N/cptk
|
e500d948e91bb70661adc3c2539b149704c734a1
|
[
"Apache-2.0"
] | 2
|
2022-01-12T19:13:20.000Z
|
2022-01-12T19:32:05.000Z
|
# {% if user is defined %}{{ user }} - {% endif %}{{ now.ctime() }}
# {{ problem.name }} ({{ problem.url }})
# Generated by cptk
| 32.25
| 67
| 0.527132
| 15
| 129
| 4.533333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.20155
| 129
| 3
| 68
| 43
| 0.660194
| 0.945736
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
421cfb3741803259431a83a0e3291b85236da645
| 2,164
|
py
|
Python
|
tests/test___init__.py
|
yukihiko-shinoda/yaml-dataclass-config-test
|
24e49cf0992e37a991cd8e505507881b14e94895
|
[
"MIT"
] | 26
|
2019-08-21T00:42:03.000Z
|
2021-09-22T15:25:59.000Z
|
tests/test___init__.py
|
yukihiko-shinoda/yaml-dataclass-config-test
|
24e49cf0992e37a991cd8e505507881b14e94895
|
[
"MIT"
] | 5
|
2020-06-04T19:04:43.000Z
|
2021-05-19T17:27:22.000Z
|
tests/test___init__.py
|
yukihiko-shinoda/yaml-dataclass-config-test
|
24e49cf0992e37a991cd8e505507881b14e94895
|
[
"MIT"
] | 5
|
2020-04-17T15:18:16.000Z
|
2021-07-16T19:24:13.000Z
|
"""Tests for __init__ module."""
import os
from pathlib import Path
from typing import Union
import pytest # type: ignore
from yamldataclassconfig import build_path
class TestFunctions:
"""Tests for functions."""
@staticmethod
def test_create_file_path_field():
"""TODO """
@staticmethod
@pytest.mark.parametrize(
"argument, expected",
[
("config.yml", Path(os.getcwd()) / "config.yml"),
("config_a.yml", Path(os.getcwd()) / "config_a.yml"),
("../config.yml", Path(os.getcwd()) / "../config.yml"),
("../config_a.yml", Path(os.getcwd()) / "../config_a.yml"),
(Path("config.yml"), Path(os.getcwd()) / "config.yml"),
(Path("config_a.yml"), Path(os.getcwd()) / "config_a.yml"),
(Path("../config.yml"), Path(os.getcwd()) / "../config.yml"),
(Path("../config_a.yml"), Path(os.getcwd()) / "../config_a.yml"),
],
)
def test_build_path_relative(argument: Union[Path, str], expected: Path):
"""Function should return Path object by argument as relative path."""
assert build_path(argument) == expected
@staticmethod
@pytest.mark.parametrize(
"argument, expected",
[
(f"{os.getcwd()}/config.yml", Path(os.getcwd()) / "config.yml"),
(f"{os.getcwd()}/config_a.yml", Path(os.getcwd()) / "config_a.yml"),
(f"{os.getcwd()}/../config.yml", Path(os.getcwd()) / "../config.yml"),
(f"{os.getcwd()}/../config_a.yml", Path(os.getcwd()) / "../config_a.yml"),
(Path(os.getcwd()) / "config.yml", Path(os.getcwd()) / "config.yml"),
(Path(os.getcwd()) / "config_a.yml", Path(os.getcwd()) / "config_a.yml"),
(Path(os.getcwd()) / "../config.yml", Path(os.getcwd()) / "../config.yml"),
(Path(os.getcwd()) / "../config_a.yml", Path(os.getcwd()) / "../config_a.yml"),
],
)
def test_build_path_absolute(argument: Union[Path, str], expected: Path):
"""Function should return Path object by argument as absolute path."""
assert build_path(argument, True) == expected
| 40.830189
| 91
| 0.562847
| 257
| 2,164
| 4.610895
| 0.178988
| 0.141772
| 0.283544
| 0.253165
| 0.783122
| 0.737553
| 0.654852
| 0.654852
| 0.654852
| 0.654852
| 0
| 0
| 0.227357
| 2,164
| 52
| 92
| 41.615385
| 0.708732
| 0.091035
| 0
| 0.225
| 0
| 0
| 0.253478
| 0.054611
| 0
| 0
| 0
| 0.019231
| 0.05
| 1
| 0.075
| false
| 0
| 0.125
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4242d82170dc60407173b8e0f6790ea67ba6f3eb
| 135
|
py
|
Python
|
MFP/__init__.py
|
Shoeboxam/Neural_Network
|
61da4c2e4f6603a08042612d5ff2fe334ee7b20f
|
[
"MIT"
] | 3
|
2017-03-11T07:21:46.000Z
|
2017-09-01T20:12:06.000Z
|
MFP/__init__.py
|
Shoeboxam/Neural_Network
|
61da4c2e4f6603a08042612d5ff2fe334ee7b20f
|
[
"MIT"
] | null | null | null |
MFP/__init__.py
|
Shoeboxam/Neural_Network
|
61da4c2e4f6603a08042612d5ff2fe334ee7b20f
|
[
"MIT"
] | null | null | null |
from .Network import MFP
from .Optimize.Backpropagation import *
from .Optimize.EvolutionaryAlgorithm import *
from .Function import *
| 27
| 45
| 0.814815
| 15
| 135
| 7.333333
| 0.533333
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118519
| 135
| 4
| 46
| 33.75
| 0.92437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
42ab5961e2801a0922fb836830749ef48705548a
| 104
|
py
|
Python
|
constants.py
|
LuisFeliciano/recommendation-system
|
f35059cbbaadd6d000c7505b89472113213b8577
|
[
"MIT"
] | null | null | null |
constants.py
|
LuisFeliciano/recommendation-system
|
f35059cbbaadd6d000c7505b89472113213b8577
|
[
"MIT"
] | null | null | null |
constants.py
|
LuisFeliciano/recommendation-system
|
f35059cbbaadd6d000c7505b89472113213b8577
|
[
"MIT"
] | null | null | null |
import os
from os import environ
# MongoDB Connection link
CONNECTION_LINK = environ['CONNECTION_LINK']
| 20.8
| 44
| 0.817308
| 14
| 104
| 5.928571
| 0.5
| 0.506024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 104
| 5
| 44
| 20.8
| 0.912088
| 0.221154
| 0
| 0
| 0
| 0
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c404adcc4a593e68f9220efa397e715bbcc396ac
| 153
|
py
|
Python
|
music/admin.py
|
12DReflections/django2
|
a0176331bac714d6dcbb3e736330eead6bc54314
|
[
"MIT"
] | null | null | null |
music/admin.py
|
12DReflections/django2
|
a0176331bac714d6dcbb3e736330eead6bc54314
|
[
"MIT"
] | null | null | null |
music/admin.py
|
12DReflections/django2
|
a0176331bac714d6dcbb3e736330eead6bc54314
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from music.models import Album, Song
admin.site.register(Album)
admin.site.register(Song)
| 21.857143
| 36
| 0.803922
| 23
| 153
| 5.347826
| 0.565217
| 0.146341
| 0.276423
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 153
| 7
| 37
| 21.857143
| 0.904412
| 0.169935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c41fc003bbbae306180d6a60b76df2d976339907
| 190
|
py
|
Python
|
tests/web_platform/css_flexbox_1/flex_lines/test_multi_line_wrap_reverse_column_reverse.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 71
|
2015-04-13T09:44:14.000Z
|
2019-03-24T01:03:02.000Z
|
tests/web_platform/css_flexbox_1/flex_lines/test_multi_line_wrap_reverse_column_reverse.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 35
|
2019-05-06T15:26:09.000Z
|
2022-03-28T06:30:33.000Z
|
tests/web_platform/css_flexbox_1/flex_lines/test_multi_line_wrap_reverse_column_reverse.py
|
jonboland/colosseum
|
cbf974be54fd7f6fddbe7285704cfaf7a866c5c5
|
[
"BSD-3-Clause"
] | 139
|
2015-05-30T18:37:43.000Z
|
2019-03-27T17:14:05.000Z
|
from tests.utils import W3CTestCase
class TestMultiLineWrapReverseColumnReverse(W3CTestCase):
vars().update(W3CTestCase.find_tests(__file__, 'multi-line-wrap-reverse-column-reverse'))
| 31.666667
| 93
| 0.821053
| 20
| 190
| 7.55
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017045
| 0.073684
| 190
| 5
| 94
| 38
| 0.840909
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.2
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c4212918532c69a179ed4d7d073a97468367316e
| 73
|
py
|
Python
|
flyqma/measurement/__init__.py
|
sbernasek/flyqma
|
4a622c3f5fed4456c3b9240f5a96428789fde9bd
|
[
"MIT"
] | 2
|
2020-02-22T09:53:15.000Z
|
2020-02-24T19:01:24.000Z
|
flyqma/measurement/__init__.py
|
sbernasek/flyqma
|
4a622c3f5fed4456c3b9240f5a96428789fde9bd
|
[
"MIT"
] | null | null | null |
flyqma/measurement/__init__.py
|
sbernasek/flyqma
|
4a622c3f5fed4456c3b9240f5a96428789fde9bd
|
[
"MIT"
] | null | null | null |
from .segmentation import Segmentation
from .measure import Measurements
| 24.333333
| 38
| 0.863014
| 8
| 73
| 7.875
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 73
| 2
| 39
| 36.5
| 0.969231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c422efa8a55cdf28d22ca675129b70bc44d7863b
| 1,319
|
py
|
Python
|
RemoveOldConfigs.py
|
ducksluck/SAC_NORDVPN
|
54c802d014673b87da3e92c5267d4ad02d02d249
|
[
"MIT"
] | null | null | null |
RemoveOldConfigs.py
|
ducksluck/SAC_NORDVPN
|
54c802d014673b87da3e92c5267d4ad02d02d249
|
[
"MIT"
] | null | null | null |
RemoveOldConfigs.py
|
ducksluck/SAC_NORDVPN
|
54c802d014673b87da3e92c5267d4ad02d02d249
|
[
"MIT"
] | null | null | null |
import os
def RemoveOldConfigs(scope, configPoolDir, colors):
# Delete All Old Configs
if scope == "All":
for filename in os.listdir(configPoolDir):
path = os.path.join(configPoolDir, filename)
try:
if filename[-5:] == ".ovpn":
os.remove(path)
except Exception as e:
print('\x1b[%sm Delete Failed %s. Error: %s \x1b[0m' % (colors['red_box'], path, e))
# Delete Double VPNs
elif "-" in scope:
for filename in os.listdir(configPoolDir):
path = os.path.join(configPoolDir, filename)
try:
if filename[-5:] == ".ovpn" and "-" in filename:
os.remove(path)
except Exception as e:
print('\x1b[%sm Delete Failed %s. Error: %s \x1b[0m' % (colors['red_box'], path, e))
# Delete Selected Country
else:
for filename in os.listdir(configPoolDir):
path = os.path.join(configPoolDir, filename)
try:
if filename[-5:] == ".ovpn" and filename.startswith(scope):
os.remove(path)
except Exception as e:
print('\x1b[%sm Delete Failed %s. Error: %s \x1b[0m' % (colors['red_box'], path, e))
| 38.794118
| 102
| 0.5163
| 148
| 1,319
| 4.581081
| 0.290541
| 0.048673
| 0.057522
| 0.066372
| 0.774336
| 0.774336
| 0.774336
| 0.774336
| 0.774336
| 0.774336
| 0
| 0.014184
| 0.358605
| 1,319
| 33
| 103
| 39.969697
| 0.787234
| 0.04928
| 0
| 0.692308
| 0
| 0
| 0.144618
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.038462
| 0
| 0.076923
| 0.115385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c4329ce2f49dc7468a840f4ba1592db3e6dfc436
| 492
|
py
|
Python
|
OOPS/Polymorphism/Ducktype.py
|
prathimacode-hub/PythonScripts
|
e80d472705315b55ae61205c4606c46dcbf62cfc
|
[
"MIT"
] | 5
|
2020-11-24T12:32:11.000Z
|
2020-12-08T18:14:25.000Z
|
OOPS/Polymorphism/Ducktype.py
|
prathimacode-hub/PythonScripts
|
e80d472705315b55ae61205c4606c46dcbf62cfc
|
[
"MIT"
] | 1
|
2021-02-28T10:07:10.000Z
|
2021-02-28T10:07:10.000Z
|
OOPS/Polymorphism/Ducktype.py
|
prathimacode-hub/PythonScripts
|
e80d472705315b55ae61205c4606c46dcbf62cfc
|
[
"MIT"
] | 8
|
2020-11-24T09:48:02.000Z
|
2021-05-28T15:10:12.000Z
|
class Vscode():
def execute(self):
print("code is compiling and code is running")
class Pycharm():
def execute(self):
print("code is compiling and code is running")
class python():
def execute(self):
print("python is using")
class CPP():
def execute(self):
print("C++ is using")
class Laptop():
def code(self,ide,lang):
ide.execute(self)
lang.execute(self)
lap1=Laptop()
lap1.code(Vscode,python)
lap1.code(Pycharm,CPP)
| 18.222222
| 54
| 0.623984
| 67
| 492
| 4.58209
| 0.298507
| 0.214984
| 0.18241
| 0.247557
| 0.358306
| 0.358306
| 0.358306
| 0.358306
| 0.358306
| 0.358306
| 0
| 0.008021
| 0.239837
| 492
| 26
| 55
| 18.923077
| 0.812834
| 0
| 0
| 0.315789
| 0
| 0
| 0.206544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0
| 0
| 0.526316
| 0.210526
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
c44617469e01a296a30a8467f468f5a558cd36a3
| 113
|
py
|
Python
|
tcpchan/core/__init__.py
|
frankurcrazy/tcpchan
|
705e2b9df2260b38f5164cd7d5db865c5a8fa766
|
[
"BSD-3-Clause"
] | null | null | null |
tcpchan/core/__init__.py
|
frankurcrazy/tcpchan
|
705e2b9df2260b38f5164cd7d5db865c5a8fa766
|
[
"BSD-3-Clause"
] | null | null | null |
tcpchan/core/__init__.py
|
frankurcrazy/tcpchan
|
705e2b9df2260b38f5164cd7d5db865c5a8fa766
|
[
"BSD-3-Clause"
] | null | null | null |
from .chan import *
from .conn import *
from .msg import *
__all__ = chan.__all__ + msg.__all__ + conn.__all__
| 16.142857
| 51
| 0.707965
| 16
| 113
| 4
| 0.375
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185841
| 113
| 6
| 52
| 18.833333
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c461cdb2388884edb61b2dc4267280e2d456d609
| 155
|
py
|
Python
|
ver1_0/openassembly/pirate_messages/admin.py
|
fragro/Open-Assembly
|
e9679ff5e7ae9881fa5781d763288ed2f40b014d
|
[
"BSD-3-Clause"
] | 1
|
2015-11-05T08:22:19.000Z
|
2015-11-05T08:22:19.000Z
|
ver1_0/openassembly/pirate_messages/admin.py
|
fragro/Open-Assembly
|
e9679ff5e7ae9881fa5781d763288ed2f40b014d
|
[
"BSD-3-Clause"
] | null | null | null |
ver1_0/openassembly/pirate_messages/admin.py
|
fragro/Open-Assembly
|
e9679ff5e7ae9881fa5781d763288ed2f40b014d
|
[
"BSD-3-Clause"
] | 1
|
2018-02-03T18:25:41.000Z
|
2018-02-03T18:25:41.000Z
|
from pirate_messages.models import Notification, Message
from django.contrib import admin
admin.site.register(Notification)
admin.site.register(Message)
| 22.142857
| 56
| 0.845161
| 20
| 155
| 6.5
| 0.6
| 0.138462
| 0.261538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083871
| 155
| 6
| 57
| 25.833333
| 0.915493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c47e47aef3fba78db368dacf97e5786cd0e1de94
| 100
|
py
|
Python
|
src/db/__init__.py
|
TWT233/CheeringStar
|
3f6e8a9fbeba1a368c4dd85c731b27b06c0fd850
|
[
"MIT"
] | null | null | null |
src/db/__init__.py
|
TWT233/CheeringStar
|
3f6e8a9fbeba1a368c4dd85c731b27b06c0fd850
|
[
"MIT"
] | null | null | null |
src/db/__init__.py
|
TWT233/CheeringStar
|
3f6e8a9fbeba1a368c4dd85c731b27b06c0fd850
|
[
"MIT"
] | 1
|
2021-09-19T08:50:49.000Z
|
2021-09-19T08:50:49.000Z
|
from . import models
from .init import get_db, engine
models.Base.metadata.create_all(bind=engine)
| 20
| 44
| 0.8
| 16
| 100
| 4.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11
| 100
| 4
| 45
| 25
| 0.876404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c47eeeb0cc6271fcd1fec4a9b6570e381d65d99f
| 44
|
py
|
Python
|
domain/consumption/__init__.py
|
jgasteiz/home-consumption-dashboard
|
d85dc967da66db8d9f2ec5d09bb6f14f0ca9f953
|
[
"MIT"
] | 2
|
2020-08-31T16:22:23.000Z
|
2021-09-21T16:08:17.000Z
|
domain/unit_rates/__init__.py
|
jgasteiz/home-consumption-dashboard
|
d85dc967da66db8d9f2ec5d09bb6f14f0ca9f953
|
[
"MIT"
] | 4
|
2020-04-05T08:58:47.000Z
|
2021-11-24T14:32:59.000Z
|
domain/unit_rates/__init__.py
|
jgasteiz/home-consumption-dashboard
|
d85dc967da66db8d9f2ec5d09bb6f14f0ca9f953
|
[
"MIT"
] | 1
|
2021-11-24T14:33:10.000Z
|
2021-11-24T14:33:10.000Z
|
from ._api import *
from ._queries import *
| 14.666667
| 23
| 0.727273
| 6
| 44
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 44
| 2
| 24
| 22
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c47f3115cc185e931035e7a8d1be3eb364b23c6f
| 185
|
py
|
Python
|
src/pyimpute_geometric/__init__.py
|
Mashiku7/pyimpute-geometric
|
2431b250ea4e52939ba8ce5044ab5b1693dc95ba
|
[
"BSD-3-Clause"
] | 1
|
2019-06-03T20:23:18.000Z
|
2019-06-03T20:23:18.000Z
|
src/pyimpute_geometric/__init__.py
|
Mashiku7/pyimpute_geometric
|
2431b250ea4e52939ba8ce5044ab5b1693dc95ba
|
[
"BSD-3-Clause"
] | null | null | null |
src/pyimpute_geometric/__init__.py
|
Mashiku7/pyimpute_geometric
|
2431b250ea4e52939ba8ce5044ab5b1693dc95ba
|
[
"BSD-3-Clause"
] | 1
|
2019-06-03T20:23:21.000Z
|
2019-06-03T20:23:21.000Z
|
from __future__ import absolute_import
from ._main import load_training_vector, load_training_rasters, load_targets, \
impute, stratified_sample_raster, evaluate_clf
| 37
| 79
| 0.783784
| 22
| 185
| 5.954545
| 0.727273
| 0.183206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178378
| 185
| 4
| 80
| 46.25
| 0.861842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c4870945c0ef3fa2050d782b44c8e77663532be7
| 55
|
py
|
Python
|
nighres/data/__init__.py
|
jennydaman/nighres
|
9ced74e61db02261e4753a69b03f4479bfdc26b6
|
[
"Apache-2.0"
] | null | null | null |
nighres/data/__init__.py
|
jennydaman/nighres
|
9ced74e61db02261e4753a69b03f4479bfdc26b6
|
[
"Apache-2.0"
] | null | null | null |
nighres/data/__init__.py
|
jennydaman/nighres
|
9ced74e61db02261e4753a69b03f4479bfdc26b6
|
[
"Apache-2.0"
] | null | null | null |
from nighres.data.download_data import download_7T_TRT
| 27.5
| 54
| 0.890909
| 9
| 55
| 5.111111
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019608
| 0.072727
| 55
| 1
| 55
| 55
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6709598cd2fa5fe91d269c03c600238143624582
| 378
|
py
|
Python
|
py3wirecard/entities/checkoutpreferences.py
|
robertons/py3wirecard
|
8a9b541a67ee96d75b1c864762fce7148cccb8b4
|
[
"MIT"
] | 2
|
2019-09-05T20:20:44.000Z
|
2020-01-14T18:20:45.000Z
|
py3wirecard/entities/checkoutpreferences.py
|
robertons/py3wirecard
|
8a9b541a67ee96d75b1c864762fce7148cccb8b4
|
[
"MIT"
] | 1
|
2020-01-15T12:27:56.000Z
|
2020-01-16T12:26:13.000Z
|
py3wirecard/entities/checkoutpreferences.py
|
robertons/py3wirecard
|
8a9b541a67ee96d75b1c864762fce7148cccb8b4
|
[
"MIT"
] | null | null | null |
#-*- coding: utf-8 -*-
from py3wirecard.entities.lib.wireentity import *
from py3wirecard.entities.installments import Installments
from py3wirecard.entities.redirecturls import RedirectUrls
class CheckoutPreferences(WireEntity):
@Object(type=RedirectUrls, required=True)
def redirectUrls(self):pass
@Object(type=Installments, required=True)
def installments(self):pass
| 27
| 58
| 0.809524
| 42
| 378
| 7.285714
| 0.47619
| 0.147059
| 0.22549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0.089947
| 378
| 13
| 59
| 29.076923
| 0.877907
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.375
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
676b9406668bb13e025bf52bb751256b53ac6645
| 32
|
py
|
Python
|
circulo/wrappers/__init__.py
|
Lab41PaulM/Circulo
|
8a15e94cf4434b6945d41709bb33a5d25b1963d5
|
[
"Apache-2.0"
] | null | null | null |
circulo/wrappers/__init__.py
|
Lab41PaulM/Circulo
|
8a15e94cf4434b6945d41709bb33a5d25b1963d5
|
[
"Apache-2.0"
] | null | null | null |
circulo/wrappers/__init__.py
|
Lab41PaulM/Circulo
|
8a15e94cf4434b6945d41709bb33a5d25b1963d5
|
[
"Apache-2.0"
] | null | null | null |
#from circulo import algorithms
| 16
| 31
| 0.84375
| 4
| 32
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.964286
| 0.9375
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
67881c79a64c2e61906e8fe2936cd2556e1b9c9b
| 132
|
py
|
Python
|
tranquil/components/__init__.py
|
ysenarath/twespy
|
7db1604ad5497825dfc7bf20c3cc0690f1032dc5
|
[
"MIT"
] | null | null | null |
tranquil/components/__init__.py
|
ysenarath/twespy
|
7db1604ad5497825dfc7bf20c3cc0690f1032dc5
|
[
"MIT"
] | null | null | null |
tranquil/components/__init__.py
|
ysenarath/twespy
|
7db1604ad5497825dfc7bf20c3cc0690f1032dc5
|
[
"MIT"
] | null | null | null |
from tranquil.components.navbar import Navbar
from tranquil.components.chart import Chart
__all__ = [
'Chart',
'Navbar',
]
| 16.5
| 45
| 0.727273
| 15
| 132
| 6.133333
| 0.466667
| 0.26087
| 0.478261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174242
| 132
| 7
| 46
| 18.857143
| 0.844037
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
67a304297a118a1435580c14dec4b3011dc1b37d
| 110
|
py
|
Python
|
src/__init__.py
|
jameswenzel/mydy
|
2315139424ee5cd67469ba5c7d7d38496f00b70b
|
[
"MIT"
] | 7
|
2017-09-12T08:35:26.000Z
|
2020-10-05T18:58:56.000Z
|
src/__init__.py
|
jameswenzel/mydy
|
2315139424ee5cd67469ba5c7d7d38496f00b70b
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
jameswenzel/mydy
|
2315139424ee5cd67469ba5c7d7d38496f00b70b
|
[
"MIT"
] | 2
|
2017-09-24T01:06:07.000Z
|
2019-06-25T06:21:34.000Z
|
from . import Containers
from . import Constants
from . import Events
from . import FileIO
from . import Util
| 18.333333
| 24
| 0.772727
| 15
| 110
| 5.666667
| 0.466667
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 110
| 5
| 25
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
67be6db50206f5b1572991dd70f21bbf54c71cf1
| 1,564
|
py
|
Python
|
bots/config.py
|
FernandoC1217/Twitter_Bots
|
df1bc8a16db3d5fe9fd99280ca3e9d5ab18102b2
|
[
"MIT"
] | null | null | null |
bots/config.py
|
FernandoC1217/Twitter_Bots
|
df1bc8a16db3d5fe9fd99280ca3e9d5ab18102b2
|
[
"MIT"
] | null | null | null |
bots/config.py
|
FernandoC1217/Twitter_Bots
|
df1bc8a16db3d5fe9fd99280ca3e9d5ab18102b2
|
[
"MIT"
] | null | null | null |
# tweepy-bots/bots/config.py
import tweepy
import logging
import os
logger = logging.getLogger()
#Add your credentials here
twitter_keys = {
'consumer_key': 'uw9LFBUG68eTQ9WbOxDqIceNr',
'consumer_secret': 'XZbN2YE6teESsBjRNY2AG9JYhbvPqmvv798rIejM4Lg9xdWmhd',
'access_token_key': '4492109894-vMXUD1of3x3CacjPVn4sikXEnGuNDwuwjZjVn2d',
'access_token_secret': 'xDm3uliQkViSfKn4XgFcmXjkzhwL6w5pmcXUEhoN7C2uu'
}
#Setup access to API
auth = tweepy.OAuthHandler(twitter_keys['consumer_key'], twitter_keys['consumer_secret'])
auth.set_access_token(twitter_keys['access_token_key'], twitter_keys['access_token_secret'])
api = tweepy.API(auth)
def create_api():
# Add your credentials here
twitter_keys = {
'consumer_key': 'uw9LFBUG68eTQ9WbOxDqIceNr',
'consumer_secret': 'XZbN2YE6teESsBjRNY2AG9JYhbvPqmvv798rIejM4Lg9xdWmhd',
'access_token_key': '4492109894-vMXUD1of3x3CacjPVn4sikXEnGuNDwuwjZjVn2d',
'access_token_secret': 'xDm3uliQkViSfKn4XgFcmXjkzhwL6w5pmcXUEhoN7C2uu'
}
# Setup access to API
auth = tweepy.OAuthHandler(twitter_keys['consumer_key'], twitter_keys['consumer_secret'])
auth.set_access_token(twitter_keys['access_token_key'], twitter_keys['access_token_secret'])
api = tweepy.API(auth, wait_on_rate_limit=True,
wait_on_rate_limit_notify=True)
try:
api.verify_credentials()
except Exception as e:
logger.error("Error creating API", exc_info=True)
raise e
logger.info("API created")
return api
| 32.583333
| 96
| 0.738491
| 160
| 1,564
| 6.91875
| 0.325
| 0.099368
| 0.102981
| 0.079494
| 0.776874
| 0.776874
| 0.776874
| 0.776874
| 0.776874
| 0.776874
| 0
| 0.052147
| 0.16624
| 1,564
| 47
| 97
| 33.276596
| 0.796779
| 0.074169
| 0
| 0.451613
| 0
| 0
| 0.429367
| 0.236604
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.096774
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
67d16c10d4cbba6d54a974eb92e774316a006545
| 118
|
py
|
Python
|
app/core/crypto_market_api/__init__.py
|
LukeSamkharadze/wallet-management
|
c2efeb54676e4fe0901b6980f1ba6e5af5796271
|
[
"MIT"
] | null | null | null |
app/core/crypto_market_api/__init__.py
|
LukeSamkharadze/wallet-management
|
c2efeb54676e4fe0901b6980f1ba6e5af5796271
|
[
"MIT"
] | null | null | null |
app/core/crypto_market_api/__init__.py
|
LukeSamkharadze/wallet-management
|
c2efeb54676e4fe0901b6980f1ba6e5af5796271
|
[
"MIT"
] | null | null | null |
from typing import Protocol
class ICryptoMarketApi(Protocol):
def get_price_of_btc(self) -> float:
pass
| 16.857143
| 40
| 0.720339
| 15
| 118
| 5.466667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211864
| 118
| 6
| 41
| 19.666667
| 0.88172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
db18db5b717956b7c7aed00ba98e603b27253776
| 67
|
py
|
Python
|
main.py
|
alexdjulin/cocollage
|
2832574565f62a7e09165c0033f4fdedc724cb0d
|
[
"MIT"
] | null | null | null |
main.py
|
alexdjulin/cocollage
|
2832574565f62a7e09165c0033f4fdedc724cb0d
|
[
"MIT"
] | null | null | null |
main.py
|
alexdjulin/cocollage
|
2832574565f62a7e09165c0033f4fdedc724cb0d
|
[
"MIT"
] | null | null | null |
from ui import coco_ui
if __name__ == '__main__':
coco_ui()
| 9.571429
| 26
| 0.656716
| 10
| 67
| 3.4
| 0.7
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238806
| 67
| 6
| 27
| 11.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e1dac23503f775eca5973b9a9ad948bfc7313062
| 172
|
py
|
Python
|
aoc2019/__init__.py
|
rgooler/AOC2019
|
f761881240a5fe8711f730887f0f5033ea287e3d
|
[
"Apache-2.0"
] | null | null | null |
aoc2019/__init__.py
|
rgooler/AOC2019
|
f761881240a5fe8711f730887f0f5033ea287e3d
|
[
"Apache-2.0"
] | null | null | null |
aoc2019/__init__.py
|
rgooler/AOC2019
|
f761881240a5fe8711f730887f0f5033ea287e3d
|
[
"Apache-2.0"
] | null | null | null |
from .amplifiers import *
from .cpu import *
from .crack import *
from .map import *
from .rocket import *
from .wires import *
from .digital_sending_network import *
| 24.571429
| 38
| 0.726744
| 23
| 172
| 5.347826
| 0.478261
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19186
| 172
| 7
| 38
| 24.571429
| 0.884892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c0008689631d2da761982fcff589cd5f3539da5f
| 82
|
py
|
Python
|
TOPSIS-Elvis-101803397/__init__.py
|
ElvisSethi/TOPSIS-Elvis-101803397
|
94d91655854e383bea8af87591f289d66bb36c87
|
[
"MIT"
] | null | null | null |
TOPSIS-Elvis-101803397/__init__.py
|
ElvisSethi/TOPSIS-Elvis-101803397
|
94d91655854e383bea8af87591f289d66bb36c87
|
[
"MIT"
] | null | null | null |
TOPSIS-Elvis-101803397/__init__.py
|
ElvisSethi/TOPSIS-Elvis-101803397
|
94d91655854e383bea8af87591f289d66bb36c87
|
[
"MIT"
] | null | null | null |
import os
import pandas as pd
import math
from utilFile import TOPSIS
| 9.111111
| 28
| 0.707317
| 12
| 82
| 4.833333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.292683
| 82
| 8
| 29
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c01a8bd46d53ca64055c5bff7c762851f69b675a
| 119
|
py
|
Python
|
python_modules/dagster/dagster/core/executor/step_delegating/__init__.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
python_modules/dagster/dagster/core/executor/step_delegating/__init__.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
python_modules/dagster/dagster/core/executor/step_delegating/__init__.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
from .step_delegating_executor import StepDelegatingExecutor
from .step_handler import StepHandler, StepHandlerContext
| 39.666667
| 60
| 0.89916
| 12
| 119
| 8.666667
| 0.75
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07563
| 119
| 2
| 61
| 59.5
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c02631c78c809fbd445d1f887613b5b63a18c786
| 15
|
py
|
Python
|
tools/__init__.py
|
epopisces/template_api_wrapper
|
e581eb31f6123ca2d93803453f2a1ab25c3c1981
|
[
"MIT"
] | null | null | null |
tools/__init__.py
|
epopisces/template_api_wrapper
|
e581eb31f6123ca2d93803453f2a1ab25c3c1981
|
[
"MIT"
] | null | null | null |
tools/__init__.py
|
epopisces/template_api_wrapper
|
e581eb31f6123ca2d93803453f2a1ab25c3c1981
|
[
"MIT"
] | null | null | null |
# An init file!
| 15
| 15
| 0.666667
| 3
| 15
| 3.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 15
| 1
| 15
| 15
| 0.833333
| 0.866667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c03c3c5aa93204f9336ce05796621a28a8927358
| 12,175
|
py
|
Python
|
mumin/id_updator.py
|
MuMiN-dataset/mumin-build
|
c094a88b4f81a7ebc5d56a55cb513ae32e7b1339
|
[
"MIT"
] | 12
|
2022-02-24T13:49:40.000Z
|
2022-03-31T16:52:16.000Z
|
mumin/id_updator.py
|
CLARITI-REPHRAIN/mumin-build
|
c75358a6fe41780e039183864569a527047b1fa8
|
[
"MIT"
] | 2
|
2021-12-14T16:43:11.000Z
|
2021-12-15T11:54:32.000Z
|
mumin/id_updator.py
|
CLARITI-REPHRAIN/mumin-build
|
c75358a6fe41780e039183864569a527047b1fa8
|
[
"MIT"
] | null | null | null |
'''Class that updates the precomputed IDs'''
import pandas as pd
from typing import Dict, Tuple
class IdUpdator:
'''Class that updates the IDs of nodes and relations'''
def update_all(self,
nodes: Dict[str, pd.DataFrame],
rels: Dict[Tuple[str, str, str], pd.DataFrame]
) -> Tuple[dict, dict]:
'''Extract all node and relation data.
Args:
nodes (Dict[str, pd.DataFrame]):
A dictionary of node dataframes.
rels (Dict[Tuple[str, str, str], pd.DataFrame]):
A dictionary of relation dataframes.
Returns:
pair of dicts:
A tuple of updated node and relation dictionaries.
'''
rel = ('tweet', 'discusses', 'claim')
if rel in rels.keys():
rels[rel] = self._update_tweet_discusses_claim(
rel_df=rels[rel],
tweet_df=nodes['tweet'],
claim_df=nodes['claim']
)
rel = ('article', 'discusses', 'claim')
if rel in rels.keys():
rels[rel] = self._update_article_discusses_claim(
rel_df=rels[rel],
article_df=nodes['article'],
claim_df=nodes['claim']
)
rel = ('user', 'follows', 'user')
if rel in rels.keys():
rels[rel] = self._update_user_follows_user(
rel_df=rels[rel],
user_df=nodes['user']
)
rel = ('reply', 'reply_to', 'tweet')
if rel in rels.keys():
rels[rel] = self._update_reply_reply_to_tweet(
rel_df=rels[rel],
reply_df=nodes['reply'],
tweet_df=nodes['tweet']
)
rel = ('reply', 'quote_of', 'tweet')
if rel in rels.keys():
rels[rel] = self._update_reply_quote_of_tweet(
rel_df=rels[rel],
reply_df=nodes['reply'],
tweet_df=nodes['tweet']
)
rel = ('user', 'retweeted', 'tweet')
if rel in rels.keys():
rels[rel] = self._update_user_retweeted_tweet(
rel_df=rels[rel],
user_df=nodes['user'],
tweet_df=nodes['tweet']
)
# Remove ID columns from the claim and article dataframes
nodes['claim'] = self._remove_id_column(node_df=nodes['claim'])
if 'article' in nodes.keys():
nodes['article'] = self._remove_id_column(node_df=nodes['article'])
return nodes, rels
def _update_tweet_discusses_claim(self,
rel_df: pd.DataFrame,
tweet_df: pd.DataFrame,
claim_df: pd.DataFrame
) -> pd.DataFrame:
'''Update the (:Tweet)-[:DISCUSSES]->(:Claim) relation.
Args:
rel_df (pd.DataFrame): The relation dataframe.
tweet_df (pd.DataFrame): The tweet dataframe.
claim_df (pd.DataFrame): The claim dataframe.
Returns:
pd.DataFrame: The updated relation dataframe.
'''
if len(rel_df) > 0:
merged = (rel_df.astype(dict(src=int, tgt=int))
.merge(tweet_df[['tweet_id']]
.reset_index()
.rename(columns=dict(index='tweet_idx')),
left_on='src',
right_on='tweet_id')
.merge(claim_df[['id']]
.reset_index()
.rename(columns=dict(index='claim_idx')),
left_on='tgt',
right_on='id'))
if len(merged) > 0:
data_dict = dict(src=merged.tweet_idx.tolist(),
tgt=merged.claim_idx.tolist())
rel_df = pd.DataFrame(data_dict)
else:
rel_df = pd.DataFrame()
return rel_df
def _update_article_discusses_claim(self,
rel_df: pd.DataFrame,
article_df: pd.DataFrame,
claim_df: pd.DataFrame
) -> pd.DataFrame:
'''Update the (:Article)-[:DISCUSSES]->(:Claim) relation.
Args:
rel_df (pd.DataFrame): The relation dataframe.
article_df (pd.DataFrame): The article dataframe.
claim_df (pd.DataFrame): The claim dataframe.
Returns:
pd.DataFrame: The updated relation dataframe.
'''
if len(rel_df) > 0:
merged = (rel_df.astype(dict(src=int, tgt=int))
.merge(article_df[['id']]
.reset_index()
.rename(columns=dict(index='art_idx')),
left_on='src',
right_on='id')
.merge(claim_df[['id']]
.reset_index()
.rename(columns=dict(index='claim_idx')),
left_on='tgt',
right_on='id'))
if len(merged) > 0:
data_dict = dict(src=merged.art_idx.tolist(),
tgt=merged.claim_idx.tolist())
rel_df = pd.DataFrame(data_dict)
else:
rel_df = pd.DataFrame()
return rel_df
def _update_user_follows_user(self,
rel_df: pd.DataFrame,
user_df: pd.DataFrame) -> pd.DataFrame:
'''Update the (:User)-[:FOLLOWS]->(:User) relation.
Args:
rel_df (pd.DataFrame): The relation dataframe.
user_df (pd.DataFrame): The user dataframe.
Returns:
pd.DataFrame: The updated relation dataframe.
'''
if len(rel_df) > 0:
merged = (rel_df.astype(dict(src=int, tgt=int))
.merge(user_df[['user_id']]
.reset_index()
.rename(columns=dict(index='user_idx1')),
left_on='src',
right_on='user_id')
.merge(user_df[['user_id']]
.reset_index()
.rename(columns=dict(index='user_idx2')),
left_on='tgt',
right_on='user_id'))
if len(merged) > 0:
data_dict = dict(src=merged.user_idx1.tolist(),
tgt=merged.user_idx2.tolist())
rel_df = pd.DataFrame(data_dict)
else:
rel_df = pd.DataFrame()
return rel_df
def _update_reply_reply_to_tweet(self,
rel_df: pd.DataFrame,
reply_df: pd.DataFrame,
tweet_df: pd.DataFrame) -> pd.DataFrame:
'''Update the (:Reply)-[:REPLY_TO]->(:Tweet) relation.
Args:
rel_df (pd.DataFrame): The relation dataframe.
reply_df (pd.DataFrame): The reply dataframe.
tweet_df (pd.DataFrame): The tweet dataframe.
Returns:
pd.DataFrame: The updated relation dataframe.
'''
if len(rel_df) > 0:
merged = (rel_df.astype(dict(src=int, tgt=int))
.merge(reply_df[['tweet_id']]
.reset_index()
.rename(columns=dict(index='reply_idx')),
left_on='src',
right_on='tweet_id')
.merge(tweet_df[['tweet_id']]
.reset_index()
.rename(columns=dict(index='tweet_idx')),
left_on='tgt',
right_on='tweet_id'))
if len(merged) > 0:
data_dict = dict(src=merged.reply_idx.tolist(),
tgt=merged.tweet_idx.tolist())
rel_df = pd.DataFrame(data_dict)
else:
rel_df = pd.DataFrame()
return rel_df
def _update_reply_quote_of_tweet(self,
rel_df: pd.DataFrame,
reply_df: pd.DataFrame,
tweet_df: pd.DataFrame) -> pd.DataFrame:
'''Update the (:Reply)-[:QUOTE_OF]->(:Tweet) relation.
Args:
rel_df (pd.DataFrame): The relation dataframe.
reply_df (pd.DataFrame): The reply dataframe.
tweet_df (pd.DataFrame): The tweet dataframe.
Returns:
pd.DataFrame: The updated relation dataframe.
'''
if len(rel_df) > 0:
merged = (rel_df.astype(dict(src=int, tgt=int))
.merge(reply_df[['tweet_id']]
.reset_index()
.rename(columns=dict(index='reply_idx')),
left_on='src',
right_on='tweet_id')
.merge(tweet_df[['tweet_id']]
.reset_index()
.rename(columns=dict(index='tweet_idx')),
left_on='tgt',
right_on='tweet_id'))
if len(merged) > 0:
data_dict = dict(src=merged.reply_idx.tolist(),
tgt=merged.tweet_idx.tolist())
rel_df = pd.DataFrame(data_dict)
else:
rel_df = pd.DataFrame()
return rel_df
def _update_user_retweeted_tweet(self,
rel_df: pd.DataFrame,
user_df: pd.DataFrame,
tweet_df: pd.DataFrame) -> pd.DataFrame:
'''Update the (:User)-[:RETWEETED]->(:Tweet) relation.
Args:
rel_df (pd.DataFrame): The relation dataframe.
user_df (pd.DataFrame): The user dataframe.
tweet_df (pd.DataFrame): The tweet dataframe.
Returns:
pd.DataFrame: The updated relation dataframe.
'''
if len(rel_df) > 0:
merged = (rel_df.astype(dict(src=int, tgt=int))
.merge(user_df[['user_id']]
.reset_index()
.rename(columns=dict(index='user_idx')),
left_on='src',
right_on='user_id')
.merge(tweet_df[['tweet_id']]
.reset_index()
.rename(columns=dict(index='tweet_idx')),
left_on='tgt',
right_on='tweet_id'))
if len(merged) > 0:
data_dict = dict(src=merged.user_idx.tolist(),
tgt=merged.tweet_idx.tolist())
rel_df = pd.DataFrame(data_dict)
else:
rel_df = pd.DataFrame()
return rel_df
def _remove_id_column(self, node_df: pd.DataFrame) -> pd.DataFrame:
'''Remove the id column from the node dataframe.
Args:
node_df (pd.DataFrame): The node dataframe.
Returns:
pd.DataFrame: The node dataframe without the id column.
'''
if len(node_df) > 0:
node_df = node_df.drop(columns='id')
return node_df
| 40.31457
| 79
| 0.440493
| 1,172
| 12,175
| 4.356655
| 0.069113
| 0.142186
| 0.122209
| 0.075206
| 0.834313
| 0.78868
| 0.765374
| 0.739718
| 0.690364
| 0.67548
| 0
| 0.002564
| 0.455524
| 12,175
| 301
| 80
| 40.448505
| 0.767687
| 0.180205
| 0
| 0.697436
| 0
| 0
| 0.051466
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041026
| false
| 0
| 0.010256
| 0
| 0.097436
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
220a0fd0153f272f4b7831e84d5ade833fe3ae6c
| 249
|
py
|
Python
|
spike_swarm_sim/neural_networks/utils/__init__.py
|
Robolabo/EvoSwarmSim
|
45f20f00b079a9481e324e091c46040182cf1d3c
|
[
"MIT"
] | null | null | null |
spike_swarm_sim/neural_networks/utils/__init__.py
|
Robolabo/EvoSwarmSim
|
45f20f00b079a9481e324e091c46040182cf1d3c
|
[
"MIT"
] | null | null | null |
spike_swarm_sim/neural_networks/utils/__init__.py
|
Robolabo/EvoSwarmSim
|
45f20f00b079a9481e324e091c46040182cf1d3c
|
[
"MIT"
] | null | null | null |
import logging
from .monitor import NeuralNetMonitor
from .builder import SynapsesBuilder
try:
from .visualization import *
except:
logging.warning('Visualization Module cannot be loaded. Running without it')
from .utils import *
| 27.666667
| 81
| 0.75502
| 28
| 249
| 6.714286
| 0.678571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188755
| 249
| 9
| 82
| 27.666667
| 0.930693
| 0
| 0
| 0
| 0
| 0
| 0.235537
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.625
| 0
| 0.625
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
220d4ae8a971be2f5eb6c5c0483a18cfbd77fe68
| 79
|
py
|
Python
|
test/__init__.py
|
yuto51942/python-template
|
db243271f3be6ae0c8e6bd7cde0dda2de3696eb1
|
[
"MIT"
] | 1
|
2020-12-25T04:25:29.000Z
|
2020-12-25T04:25:29.000Z
|
test/__init__.py
|
yuto51942/python-template
|
db243271f3be6ae0c8e6bd7cde0dda2de3696eb1
|
[
"MIT"
] | null | null | null |
test/__init__.py
|
yuto51942/python-template
|
db243271f3be6ae0c8e6bd7cde0dda2de3696eb1
|
[
"MIT"
] | null | null | null |
"""
@author Yuto Watanabe
@version 1.0.0
Copyright (c) 2020 Yuto Watanabe
"""
| 11.285714
| 32
| 0.683544
| 12
| 79
| 4.5
| 0.75
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106061
| 0.164557
| 79
| 6
| 33
| 13.166667
| 0.712121
| 0.886076
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2254f26af951d71606e8e25a8576884ed9aec4ae
| 124
|
py
|
Python
|
core/admin.py
|
aalmobin/Super_shop_management_system
|
4392b05ba4acfaea730854e15efdc3ac68f9b880
|
[
"MIT"
] | null | null | null |
core/admin.py
|
aalmobin/Super_shop_management_system
|
4392b05ba4acfaea730854e15efdc3ac68f9b880
|
[
"MIT"
] | null | null | null |
core/admin.py
|
aalmobin/Super_shop_management_system
|
4392b05ba4acfaea730854e15efdc3ac68f9b880
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import *
admin.site.register([Category, Product, Cart, CartProduct, Order])
| 20.666667
| 66
| 0.774194
| 16
| 124
| 6
| 0.8125
| 0.229167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120968
| 124
| 5
| 67
| 24.8
| 0.880734
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
225a772683b26707ff53369716ef5239fc5f0c30
| 131
|
py
|
Python
|
sales/helpers/exceptions.py
|
marcelomoraes28/real-state-marketplace
|
bcbc91b61f0f0602e87fc240994e26fbf945f212
|
[
"MIT"
] | 1
|
2019-02-18T19:44:31.000Z
|
2019-02-18T19:44:31.000Z
|
sales/helpers/exceptions.py
|
marcelomoraes28/real-state-marketplace
|
bcbc91b61f0f0602e87fc240994e26fbf945f212
|
[
"MIT"
] | 3
|
2020-02-11T23:45:00.000Z
|
2021-06-10T21:09:33.000Z
|
sales/helpers/exceptions.py
|
marcelomoraes28/real-state-marketplace
|
bcbc91b61f0f0602e87fc240994e26fbf945f212
|
[
"MIT"
] | null | null | null |
class HDateException(Exception):
pass
class HMoneyException(Exception):
pass
class Dict_Exception(Exception):
pass
| 11.909091
| 33
| 0.740458
| 13
| 131
| 7.384615
| 0.461538
| 0.40625
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.19084
| 131
| 10
| 34
| 13.1
| 0.90566
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
2272ddaba38787ac7d71cd4ac25f42bf12bab1f5
| 97
|
py
|
Python
|
dependency_load_error.py
|
Lustidrike/Economy-Bot
|
f5b989b9086d9c0834555126c275d12381dd108f
|
[
"MIT"
] | 2
|
2019-09-24T21:44:00.000Z
|
2019-09-24T21:44:17.000Z
|
dependency_load_error.py
|
Lustidrike/Economy-Bot
|
f5b989b9086d9c0834555126c275d12381dd108f
|
[
"MIT"
] | 1
|
2021-03-31T14:35:11.000Z
|
2021-11-08T17:48:47.000Z
|
dependency_load_error.py
|
Lustidrike/Economy-Bot
|
f5b989b9086d9c0834555126c275d12381dd108f
|
[
"MIT"
] | 1
|
2020-08-16T16:59:57.000Z
|
2020-08-16T16:59:57.000Z
|
from discord.ext.commands import CommandError
class DependencyLoadError(CommandError):
pass
| 19.4
| 45
| 0.824742
| 10
| 97
| 8
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 97
| 4
| 46
| 24.25
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
97e7a4cd0be97ec6774a12cf74bcb56a9c08f60d
| 175
|
py
|
Python
|
bin/iamonds/polyiamonds-12345-peanut.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | null | null | null |
bin/iamonds/polyiamonds-12345-peanut.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | null | null | null |
bin/iamonds/polyiamonds-12345-peanut.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | 1
|
2022-01-02T16:54:14.000Z
|
2022-01-02T16:54:14.000Z
|
#!/usr/bin/env python
# $Id$
"""240,892 solutions"""
import puzzler
from puzzler.puzzles.polyiamonds12345 import Polyiamonds12345Peanut
puzzler.run(Polyiamonds12345Peanut)
| 17.5
| 67
| 0.794286
| 19
| 175
| 7.315789
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 0.091429
| 175
| 9
| 68
| 19.444444
| 0.742138
| 0.245714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.